diff --git a/.flake8 b/.flake8
index fd55a842c0..131b6eb1ff 100644
--- a/.flake8
+++ b/.flake8
@@ -15,6 +15,8 @@ ignore =
     E402,
     # E501: line too long
     E501,
+    # E731: do not assign a lambda expression, use a def
+    E731,
     # W503: line break before binary operator
     W503,
     # W504: line break after binary operator
diff --git a/docs/iris/src/userguide/cube_maths.rst b/docs/iris/src/userguide/cube_maths.rst
index 6af4d5b3a6..0ac2b8da74 100644
--- a/docs/iris/src/userguide/cube_maths.rst
+++ b/docs/iris/src/userguide/cube_maths.rst
@@ -60,6 +60,10 @@ but with the data representing their difference:
          Scalar coordinates:
               forecast_reference_time: 1859-09-01 06:00:00
               height: 1.5 m
+         Attributes:
+              Conventions: CF-1.5
+              Model scenario: E1
+              source: Data from Met Office Unified Model 6.05

 .. note::
diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py
index d19c8047b6..13e4bfc813 100644
--- a/lib/iris/_concatenate.py
+++ b/lib/iris/_concatenate.py
@@ -403,11 +403,11 @@ def __init__(self, cube):
         axes = dict(T=0, Z=1, Y=2, X=3)

         # Coordinate sort function - by guessed coordinate axis, then
-        # by coordinate metadata, then by dimensions, in ascending order.
+        # by coordinate name, then by dimensions, in ascending order.
         def key_func(coord):
             return (
                 axes.get(guess_coord_axis(coord), len(axes) + 1),
-                coord.metadata,
+                coord.name(),
                 cube.coord_dims(coord),
             )
@@ -990,6 +990,9 @@ def _build_aux_coordinates(self):
                         points, bounds=bnds, **kwargs
                     )
                 except ValueError:
+                    # Ensure to remove the "circular" kwarg, which may be
+                    # present in the defn of a DimCoord being demoted.
+                    _ = kwargs.pop("circular", None)
                     coord = iris.coords.AuxCoord(
                         points, bounds=bnds, **kwargs
                     )
diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py
index 0de97b02f3..3a38b3b283 100644
--- a/lib/iris/analysis/maths.py
+++ b/lib/iris/analysis/maths.py
@@ -10,22 +10,27 @@
 from functools import lru_cache
 import inspect
+import logging
 import math
 import operator
 import warnings

 import cf_units
+import dask.array as da
 import numpy as np
 from numpy import ma

 import iris.analysis
+from iris.common import SERVICES, Resolve
+from iris.common.lenient import _lenient_client
 import iris.coords
 import iris.cube
 import iris.exceptions
 import iris.util

-import dask.array as da
-from dask.array.core import broadcast_shapes
+
+# Configure the logger.
+logger = logging.getLogger(__name__)


 @lru_cache(maxsize=128, typed=True)
@@ -115,7 +120,9 @@ def abs(cube, in_place=False):
     _assert_is_cube(cube)
     new_dtype = _output_dtype(np.abs, cube.dtype, in_place=in_place)
     op = da.absolute if cube.has_lazy_data() else np.abs
-    return _math_op_common(cube, op, cube.units, new_dtype, in_place=in_place)
+    return _math_op_common(
+        cube, op, cube.units, new_dtype=new_dtype, in_place=in_place
+    )


 def intersection_of_cubes(cube, other_cube):
@@ -179,43 +186,7 @@ def _assert_is_cube(cube):
         )


-def _assert_compatible(cube, other):
-    """
-    Checks to see if cube.data and another array can be broadcast to
-    the same shape.
-
-    """
-    try:
-        new_shape = broadcast_shapes(cube.shape, other.shape)
-    except ValueError as err:
-        # re-raise
-        raise ValueError(
-            "The array was not broadcastable to the cube's data "
-            "shape. The error message when "
-            "broadcasting:\n{}\nThe cube's shape was {} and the "
-            "array's shape was {}".format(err, cube.shape, other.shape)
-        )
-
-    if cube.shape != new_shape:
-        raise ValueError(
-            "The array operation would increase the size or "
-            "dimensionality of the cube. The new cube's data "
-            "would have had to become: {}".format(new_shape)
-        )
-
-
-def _assert_matching_units(cube, other, operation_name):
-    """
-    Check that the units of the cube and the other item are the same, or if
-    the other does not have a unit, skip this test
-    """
-    if cube.units != getattr(other, "units", cube.units):
-        msg = "Cannot use {!r} with differing units ({} & {})".format(
-            operation_name, cube.units, other.units
-        )
-        raise iris.exceptions.NotYetImplementedError(msg)
-
-
+@_lenient_client(services=SERVICES)
 def add(cube, other, dim=None, in_place=False):
     """
     Calculate the sum of two cubes, or the sum of a cube and a
@@ -249,7 +220,10 @@
     """
     _assert_is_cube(cube)
     new_dtype = _output_dtype(
-        operator.add, cube.dtype, _get_dtype(other), in_place=in_place
+        operator.add,
+        cube.dtype,
+        second_dtype=_get_dtype(other),
+        in_place=in_place,
     )
     if in_place:
         _inplace_common_checks(cube, other, "addition")
@@ -261,6 +235,7 @@
     )


+@_lenient_client(services=SERVICES)
 def subtract(cube, other, dim=None, in_place=False):
     """
     Calculate the difference between two cubes, or the difference between
@@ -294,7 +269,10 @@
     """
     _assert_is_cube(cube)
     new_dtype = _output_dtype(
-        operator.sub, cube.dtype, _get_dtype(other), in_place=in_place
+        operator.sub,
+        cube.dtype,
+        second_dtype=_get_dtype(other),
+        in_place=in_place,
     )
     if in_place:
         _inplace_common_checks(cube, other, "subtraction")
@@ -335,30 +313,15 @@
     """
     _assert_is_cube(cube)
-    _assert_matching_units(cube, other, operation_name)
-
-    if isinstance(other, iris.cube.Cube):
-        # get a coordinate comparison of this cube and the cube to do the
-        # operation with
-        coord_comp = iris.analysis._dimensional_metadata_comparison(
-            cube, other
-        )
-        bad_coord_grps = (
-            coord_comp["ungroupable_and_dimensioned"]
-            + coord_comp["resamplable"]
+    if cube.units != getattr(other, "units", cube.units):
+        emsg = (
+            f"Cannot use {operation_name!r} with differing units "
+            f"({cube.units} & {other.units})"
         )
-        if bad_coord_grps:
-            raise ValueError(
-                "This operation cannot be performed as there are "
-                "differing coordinates (%s) remaining "
-                "which cannot be ignored."
-                % ", ".join({coord_grp.name() for coord_grp in bad_coord_grps})
-            )
-    else:
-        coord_comp = None
+        raise iris.exceptions.NotYetImplementedError(emsg)

-    new_cube = _binary_op_common(
+    result = _binary_op_common(
         operation_function,
         operation_name,
         cube,
@@ -369,17 +332,10 @@
         in_place=in_place,
     )

-    if coord_comp:
-        # If a coordinate is to be ignored - remove it
-        ignore = filter(
-            None, [coord_grp[0] for coord_grp in coord_comp["ignorable"]]
-        )
-        for coord in ignore:
-            new_cube.remove_coord(coord)
-
-    return new_cube
+    return result


+@_lenient_client(services=SERVICES)
 def multiply(cube, other, dim=None, in_place=False):
     """
     Calculate the product of a cube and another cube or coordinate.
@@ -403,38 +359,23 @@
     """
     _assert_is_cube(cube)
+
     new_dtype = _output_dtype(
-        operator.mul, cube.dtype, _get_dtype(other), in_place=in_place
+        operator.mul,
+        cube.dtype,
+        second_dtype=_get_dtype(other),
+        in_place=in_place,
     )
     other_unit = getattr(other, "units", "1")
     new_unit = cube.units * other_unit
+
     if in_place:
         _inplace_common_checks(cube, other, "multiplication")
         op = operator.imul
     else:
         op = operator.mul
-    if isinstance(other, iris.cube.Cube):
-        # get a coordinate comparison of this cube and the cube to do the
-        # operation with
-        coord_comp = iris.analysis._dimensional_metadata_comparison(
-            cube, other
-        )
-        bad_coord_grps = (
-            coord_comp["ungroupable_and_dimensioned"]
-            + coord_comp["resamplable"]
-        )
-        if bad_coord_grps:
-            raise ValueError(
-                "This operation cannot be performed as there are "
-                "differing coordinates (%s) remaining "
-                "which cannot be ignored."
-                % ", ".join({coord_grp.name() for coord_grp in bad_coord_grps})
-            )
-    else:
-        coord_comp = None
-
-    new_cube = _binary_op_common(
+
+    result = _binary_op_common(
         op,
         "multiply",
         cube,
@@ -445,15 +386,7 @@
         in_place=in_place,
     )

-    if coord_comp:
-        # If a coordinate is to be ignored - remove it
-        ignore = filter(
-            None, [coord_grp[0] for coord_grp in coord_comp["ignorable"]]
-        )
-        for coord in ignore:
-            new_cube.remove_coord(coord)
-
-    return new_cube
+    return result


 def _inplace_common_checks(cube, other, math_op):
@@ -475,6 +408,7 @@
     )


+@_lenient_client(services=SERVICES)
 def divide(cube, other, dim=None, in_place=False):
     """
     Calculate the division of a cube by a cube or coordinate.
@@ -498,44 +432,29 @@
     """
     _assert_is_cube(cube)
+
     new_dtype = _output_dtype(
-        operator.truediv, cube.dtype, _get_dtype(other), in_place=in_place
+        operator.truediv,
+        cube.dtype,
+        second_dtype=_get_dtype(other),
+        in_place=in_place,
     )
     other_unit = getattr(other, "units", "1")
     new_unit = cube.units / other_unit
+
     if in_place:
         if cube.dtype.kind in "iu":
             # Cannot coerce float result from inplace division back to int.
-            aemsg = (
-                "Cannot perform inplace division of cube {!r} "
+            emsg = (
+                f"Cannot perform inplace division of cube {cube.name()!r} "
                 "with integer data."
             )
-            raise ArithmeticError(aemsg)
+            raise ArithmeticError(emsg)
         op = operator.itruediv
     else:
         op = operator.truediv
-    if isinstance(other, iris.cube.Cube):
-        # get a coordinate comparison of this cube and the cube to do the
-        # operation with
-        coord_comp = iris.analysis._dimensional_metadata_comparison(
-            cube, other
-        )
-        bad_coord_grps = (
-            coord_comp["ungroupable_and_dimensioned"]
-            + coord_comp["resamplable"]
-        )
-        if bad_coord_grps:
-            raise ValueError(
-                "This operation cannot be performed as there are "
-                "differing coordinates (%s) remaining "
-                "which cannot be ignored."
-                % ", ".join({coord_grp.name() for coord_grp in bad_coord_grps})
-            )
-    else:
-        coord_comp = None
-
-    new_cube = _binary_op_common(
+    result = _binary_op_common(
         op,
         "divide",
         cube,
@@ -546,15 +465,7 @@
         in_place=in_place,
     )

-    if coord_comp:
-        # If a coordinate is to be ignored - remove it
-        ignore = filter(
-            None, [coord_grp[0] for coord_grp in coord_comp["ignorable"]]
-        )
-        for coord in ignore:
-            new_cube.remove_coord(coord)
-
-    return new_cube
+    return result


 def exponentiate(cube, exponent, in_place=False):
@@ -585,7 +496,10 @@
     """
     _assert_is_cube(cube)
     new_dtype = _output_dtype(
-        operator.pow, cube.dtype, _get_dtype(exponent), in_place=in_place
+        operator.pow,
+        cube.dtype,
+        second_dtype=_get_dtype(exponent),
+        in_place=in_place,
     )

     if cube.has_lazy_data():
@@ -598,7 +512,11 @@ def power(data, out=None):
             return np.power(data, exponent, out)

     return _math_op_common(
-        cube, power, cube.units ** exponent, new_dtype, in_place=in_place
+        cube,
+        power,
+        cube.units ** exponent,
+        new_dtype=new_dtype,
+        in_place=in_place,
     )
@@ -628,7 +546,7 @@ def exp(cube, in_place=False):
     new_dtype = _output_dtype(np.exp, cube.dtype, in_place=in_place)
     op = da.exp if cube.has_lazy_data() else np.exp
     return _math_op_common(
-        cube, op, cf_units.Unit("1"), new_dtype, in_place=in_place
+        cube, op, cf_units.Unit("1"), new_dtype=new_dtype, in_place=in_place
     )
@@ -654,7 +572,11 @@ def log(cube, in_place=False):
     new_dtype = _output_dtype(np.log, cube.dtype, in_place=in_place)
     op = da.log if cube.has_lazy_data() else np.log
     return _math_op_common(
-        cube, op, cube.units.log(math.e), new_dtype, in_place=in_place
+        cube,
+        op,
+        cube.units.log(math.e),
+        new_dtype=new_dtype,
+        in_place=in_place,
     )
@@ -680,7 +602,7 @@ def log2(cube, in_place=False):
     new_dtype = _output_dtype(np.log2, cube.dtype, in_place=in_place)
     op = da.log2 if cube.has_lazy_data() else np.log2
     return _math_op_common(
-        cube, op, cube.units.log(2), new_dtype, in_place=in_place
+        cube, op, cube.units.log(2), new_dtype=new_dtype, in_place=in_place
     )
@@ -706,12 +628,12 @@ def log10(cube, in_place=False):
     new_dtype = _output_dtype(np.log10, cube.dtype, in_place=in_place)
     op = da.log10 if cube.has_lazy_data() else np.log10
     return _math_op_common(
-        cube, op, cube.units.log(10), new_dtype, in_place=in_place
+        cube, op, cube.units.log(10), new_dtype=new_dtype, in_place=in_place
     )


 def apply_ufunc(
-    ufunc, cube, other_cube=None, new_unit=None, new_name=None, in_place=False
+    ufunc, cube, other=None, new_unit=None, new_name=None, in_place=False
 ):
     """
     Apply a `numpy universal function
@@ -735,7 +657,7 @@

     Kwargs:

-    * other_cube:
+    * other:
         An instance of :class:`iris.cube.Cube` to be given as the second
         argument to :func:`numpy.ufunc`.
@@ -758,51 +680,59 @@
     """
     if not isinstance(ufunc, np.ufunc):
-        name = getattr(ufunc, "__name__", "function passed to apply_ufunc")
-
-        raise TypeError(
-            "{} is not recognised (it is not an instance of "
-            "numpy.ufunc)".format(name)
+        ufunc_name = getattr(
+            ufunc, "__name__", "function passed to apply_ufunc"
         )
+        emsg = f"{ufunc_name} is not recognised, it is not an instance of numpy.ufunc"
+        raise TypeError(emsg)
+
+    ufunc_name = ufunc.__name__

     if ufunc.nout != 1:
-        raise ValueError(
-            "{} returns {} objects, apply_ufunc currently "
-            "only supports ufunc functions returning a single "
-            "object.".format(ufunc.__name__, ufunc.nout)
+        emsg = (
+            f"{ufunc_name} returns {ufunc.nout} objects, apply_ufunc currently "
+            "only supports numpy.ufunc functions returning a single object."
         )
+        raise ValueError(emsg)

-    if ufunc.nin == 2:
-        if other_cube is None:
-            raise ValueError(
-                "{} requires two arguments, so other_cube "
-                "must also be passed to apply_ufunc".format(ufunc.__name__)
+    if ufunc.nin == 1:
+        if other is not None:
+            dmsg = (
+                "ignoring surplus 'other' argument to apply_ufunc, "
+                f"provided ufunc {ufunc_name!r} only requires 1 input"
             )
+            logger.debug(dmsg)

-        _assert_is_cube(other_cube)
+        new_dtype = _output_dtype(ufunc, cube.dtype, in_place=in_place)
+
+        new_cube = _math_op_common(
+            cube, ufunc, new_unit, new_dtype=new_dtype, in_place=in_place
+        )
+    elif ufunc.nin == 2:
+        if other is None:
+            emsg = (
+                f"{ufunc_name} requires two arguments, another cube "
+                "must also be passed to apply_ufunc."
+            )
+            raise ValueError(emsg)
+
+        _assert_is_cube(other)
         new_dtype = _output_dtype(
-            ufunc, cube.dtype, other_cube.dtype, in_place=in_place
+            ufunc, cube.dtype, second_dtype=other.dtype, in_place=in_place
         )

         new_cube = _binary_op_common(
             ufunc,
-            ufunc.__name__,
+            ufunc_name,
             cube,
-            other_cube,
+            other,
             new_unit,
             new_dtype=new_dtype,
             in_place=in_place,
         )
-
-    elif ufunc.nin == 1:
-        new_dtype = _output_dtype(ufunc, cube.dtype, in_place=in_place)
-
-        new_cube = _math_op_common(
-            cube, ufunc, new_unit, new_dtype, in_place=in_place
-        )
-
     else:
-        raise ValueError(ufunc.__name__ + ".nin should be 1 or 2.")
+        emsg = f"Provided ufunc '{ufunc_name}.nin' must be 1 or 2."
+        raise ValueError(emsg)

     new_cube.rename(new_name)
@@ -838,39 +768,63 @@
     `cube` and `cube.data`

     """
     _assert_is_cube(cube)
+
+    # Flag to notify the _math_op_common function to simply wrap the resultant
+    # data of the maths operation in a cube with no metadata.
+    skeleton_cube = False
+
     if isinstance(other, iris.coords.Coord):
-        other = _broadcast_cube_coord_data(cube, other, operation_name, dim)
+        # The rhs must be an array.
+        rhs = _broadcast_cube_coord_data(cube, other, operation_name, dim=dim)
     elif isinstance(other, iris.cube.Cube):
-        try:
-            broadcast_shapes(cube.shape, other.shape)
-        except ValueError:
-            other = iris.util.as_compatible_shape(other, cube)
-        other = other.core_data()
-    else:
-        other = np.asanyarray(other)
+        # Prepare to resolve the cube operands and associated coordinate
+        # metadata into the resultant cube.
+        resolver = Resolve(cube, other)
+
+        # Get the broadcast, auto-transposed safe versions of the cube operands.
+        cube = resolver.lhs_cube_resolved
+        other = resolver.rhs_cube_resolved

-    # don't worry about checking for other data types (such as scalars or
-    # np.ndarrays) because _assert_compatible validates that they are broadcast
-    # compatible with cube.data
-    _assert_compatible(cube, other)
+        # Flag that it's safe to wrap the resultant data of the math operation
+        # in a cube with no metadata, as all of the metadata of the resultant
+        # cube is being managed by the resolver.
+        skeleton_cube = True

-    def unary_func(x):
-        ret = operation_function(x, other)
-        if ret is NotImplemented:
-            # explicitly raise the TypeError, so it gets raised even if, for
+        # The rhs must be an array.
+        rhs = other.core_data()
+    else:
+        # The rhs must be an array.
+        rhs = np.asanyarray(other)
+
+    def unary_func(lhs):
+        data = operation_function(lhs, rhs)
+        if data is NotImplemented:
+            # Explicitly raise the TypeError, so it gets raised even if, for
             # example, `iris.analysis.maths.multiply(cube, other)` is called
-            # directly instead of `cube * other`
-            raise TypeError(
-                "cannot %s %r and %r objects"
-                % (
-                    operation_function.__name__,
-                    type(x).__name__,
-                    type(other).__name__,
-                )
+            # directly instead of `cube * other`.
+            emsg = (
+                f"Cannot {operation_function.__name__} {type(lhs).__name__!r} "
+                f"and {type(rhs).__name__} objects."
             )
-        return ret
+            raise TypeError(emsg)
+        return data
+
+    result = _math_op_common(
+        cube,
+        unary_func,
+        new_unit,
+        new_dtype=new_dtype,
+        in_place=in_place,
+        skeleton_cube=skeleton_cube,
+    )

-    return _math_op_common(cube, unary_func, new_unit, new_dtype, in_place)
+    if isinstance(other, iris.cube.Cube):
+        # Insert the resultant data from the maths operation
+        # within the resolved cube.
+        result = resolver.cube(result.core_data(), in_place=in_place)
+        _sanitise_metadata(result, new_unit)
+
+    return result


 def _broadcast_cube_coord_data(cube, other, operation_name, dim=None):
@@ -915,26 +869,64 @@
     return points


+def _sanitise_metadata(cube, unit):
+    """
+    As part of the maths metadata contract, clear the necessary or
+    unsupported metadata from the resultant cube of the maths operation.
+
+    """
+    # Clear the cube names.
+    cube.rename(None)
+
+    # Clear the cube cell methods.
+    cube.cell_methods = None
+
+    # Clear the cell measures.
+    for cm in cube.cell_measures():
+        cube.remove_cell_measure(cm)
+
+    # Clear the ancillary variables.
+    for av in cube.ancillary_variables():
+        cube.remove_ancillary_variable(av)
+
+    # Clear the STASH attribute, if present.
+    if "STASH" in cube.attributes:
+        del cube.attributes["STASH"]
+
+    # Set the cube units.
+    cube.units = unit
+
+
 def _math_op_common(
-    cube, operation_function, new_unit, new_dtype=None, in_place=False
+    cube,
+    operation_function,
+    new_unit,
+    new_dtype=None,
+    in_place=False,
+    skeleton_cube=False,
 ):
     _assert_is_cube(cube)

-    if in_place:
-        new_cube = cube
+    if in_place and not skeleton_cube:
         if cube.has_lazy_data():
-            new_cube.data = operation_function(cube.lazy_data())
+            cube.data = operation_function(cube.lazy_data())
         else:
             try:
                 operation_function(cube.data, out=cube.data)
             except TypeError:
-                # Non ufunc function
+                # Non-ufunc function
                 operation_function(cube.data)
+        new_cube = cube
     else:
-        new_cube = cube.copy(data=operation_function(cube.core_data()))
+        data = operation_function(cube.core_data())
+        if skeleton_cube:
+            # Simply wrap the resultant data in a cube, as no
+            # cube metadata is required by the caller.
+            new_cube = iris.cube.Cube(data)
+        else:
+            new_cube = cube.copy(data)

-    # If the result of the operation is scalar and masked, we need to fix up
-    # the dtype
+    # If the result of the operation is scalar and masked, we need to fix-up the dtype.
     if (
         new_dtype is not None
         and not new_cube.has_lazy_data()
@@ -943,8 +935,8 @@
     ):
         new_cube.data = ma.masked_array(0, 1, dtype=new_dtype)

-    iris.analysis.clear_phenomenon_identity(new_cube)
-    new_cube.units = new_unit
+    _sanitise_metadata(new_cube, new_unit)
+
     return new_cube
@@ -965,12 +957,12 @@ def __init__(self, data_func, units_func):
             are given as positional arguments. Should return another
             data array, with the same shape as the first array.

-            Can also have keyword arguments.
+            May also have keyword arguments.

         * units_func:

-            Function to calculate the unit of the resulting cube.
-            Should take the cube(s) as input and return
+            Function to calculate the units of the resulting cube.
+            Should take the cube/s as input and return
             an instance of :class:`cf_units.Unit`.

         Returns:
@@ -1008,6 +1000,22 @@ def ws_units_func(u_cube, v_cube):
         cs_cube = cs_ifunc(cube, axis=1)

         """
+        self._data_func_name = getattr(
+            data_func, "__name__", "data_func argument passed to IFunc"
+        )
+
+        if not callable(data_func):
+            emsg = f"{self._data_func_name} is not callable."
+            raise TypeError(emsg)
+
+        self._unit_func_name = getattr(
+            units_func, "__name__", "units_func argument passed to IFunc"
+        )
+
+        if not callable(units_func):
+            emsg = f"{self._unit_func_name} is not callable."
+            raise TypeError(emsg)
+
         if hasattr(data_func, "nin"):
             self.nin = data_func.nin
         else:
@@ -1023,39 +1031,38 @@
             self.nin = len(args)

         if self.nin not in [1, 2]:
-            msg = (
-                "{} requires {} input data arrays, the IFunc class "
-                "currently only supports functions requiring 1 or two "
-                "data arrays as input."
+            emsg = (
+                f"{self._data_func_name} requires {self.nin} input data "
+                "arrays, the IFunc class currently only supports functions "
+                "requiring 1 or 2 data arrays as input."
             )
-            raise ValueError(msg.format(data_func.__name__, self.nin))
+            raise ValueError(emsg)

         if hasattr(data_func, "nout"):
             if data_func.nout != 1:
-                msg = (
-                    "{} returns {} objects, the IFunc class currently "
-                    "only supports functions returning a single object."
-                )
-                raise ValueError(
-                    msg.format(data_func.__name__, data_func.nout)
+                emsg = (
+                    f"{self._data_func_name} returns {data_func.nout} objects, "
+                    "the IFunc class currently only supports functions "
+                    "returning a single object."
                )
+                raise ValueError(emsg)

         self.data_func = data_func
-
         self.units_func = units_func

     def __repr__(self):
-        return "iris.analysis.maths.IFunc({}, {})".format(
-            self.data_func.__name__, self.units_func.__name__
+        result = (
+            f"iris.analysis.maths.IFunc({self._data_func_name}, "
+            f"{self._unit_func_name})"
         )
+        return result

     def __str__(self):
-        return (
-            "IFunc constructed from the data function {} "
-            "and the units function {}".format(
-                self.data_func.__name__, self.units_func.__name__
-            )
+        result = (
+            f"IFunc constructed from the data function {self._data_func_name} "
+            f"and the units function {self._unit_func_name}"
         )
+        return result

     def __call__(
         self,
@@ -1105,11 +1112,27 @@ def wrap_data_func(*args, **kwargs):

             return self.data_func(*args, **kwargs_combined)

-        if self.nin == 2:
+        if self.nin == 1:
+            if other is not None:
+                dmsg = (
+                    "ignoring surplus 'other' argument to IFunc.__call__, "
+                    f"provided data_func {self._data_func_name!r} only requires "
+                    "1 input"
+                )
+                logger.debug(dmsg)
+
+            new_unit = self.units_func(cube)
+
+            new_cube = _math_op_common(
+                cube, wrap_data_func, new_unit, in_place=in_place
+            )
+        else:
             if other is None:
-                raise ValueError(
-                    self.data_func.__name__ + " requires two arguments"
+                emsg = (
+                    f"{self._data_func_name} requires two arguments, another "
+                    "cube must also be passed to IFunc.__call__."
                 )
+                raise ValueError(emsg)

             new_unit = self.units_func(cube, other)

@@ -1123,21 +1146,6 @@
                 in_place=in_place,
             )

-        elif self.nin == 1:
-            if other is not None:
-                raise ValueError(
-                    self.data_func.__name__ + " requires one argument"
-                )
-
-            new_unit = self.units_func(cube)
-
-            new_cube = _math_op_common(
-                cube, wrap_data_func, new_unit, in_place=in_place
-            )
-
-        else:
-            raise ValueError("self.nin should be 1 or 2.")
-
         if new_name is not None:
             new_cube.rename(new_name)
diff --git a/lib/iris/common/__init__.py b/lib/iris/common/__init__.py
index 52759dac8e..c540d81bc0 100644
--- a/lib/iris/common/__init__.py
+++ b/lib/iris/common/__init__.py
@@ -8,3 +8,4 @@
 from .lenient import *
 from .metadata import *
 from .mixin import *
+from .resolve import *
diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py
index eba3ffc6e8..dda6916727 100644
--- a/lib/iris/common/metadata.py
+++ b/lib/iris/common/metadata.py
@@ -7,10 +7,15 @@
 from abc import ABCMeta
 from collections import namedtuple
 from collections.abc import Iterable, Mapping
+from copy import deepcopy
 from functools import wraps
 import logging
 import re

+import numpy as np
+import numpy.ma as ma
+from xxhash import xxh64_hexdigest
+
 from .lenient import _LENIENT
 from .lenient import _lenient_service as lenient_service
 from .lenient import _qualname as qualname
@@ -26,6 +31,7 @@
     "CellMeasureMetadata",
     "CoordMetadata",
     "CubeMetadata",
+    "DimCoordMetadata",
     "metadata_manager_factory",
 ]

@@ -37,6 +43,39 @@
 logger = logging.getLogger(__name__)


+def _hexdigest(value):
+    """
+    Return a hexadecimal string hash representation of the provided value.
+
+    Calculates a 64-bit non-cryptographic hash of the provided value,
+    and returns the hexdigest string representation of the calculated hash.
+
+    """
+    # Special case: deal with numpy arrays.
+    if ma.isMaskedArray(value):
+        parts = (
+            value.shape,
+            xxh64_hexdigest(value.data),
+            xxh64_hexdigest(value.mask),
+        )
+        value = str(parts)
+    elif isinstance(value, np.ndarray):
+        parts = (value.shape, xxh64_hexdigest(value))
+        value = str(parts)
+
+    try:
+        # Calculate single-shot hash to avoid allocating state on the heap
+        result = xxh64_hexdigest(value)
+    except TypeError:
+        # xxhash expects a bytes-like object, so try hashing the
+        # string representation of the provided value instead, but
+        # also fold in the object type...
+        parts = (type(value), value)
+        result = xxh64_hexdigest(str(parts))
+
+    return result
+
+
 class _NamedTupleMeta(ABCMeta):
     """
     Meta-class to support the convenience of creating a namedtuple from
@@ -48,8 +87,8 @@ def __new__(mcs, name, bases, namespace):
         names = []

         for base in bases:
-            if hasattr(base, "_members"):
-                base_names = getattr(base, "_members")
+            if hasattr(base, "_fields"):
+                base_names = getattr(base, "_fields")
                 is_abstract = getattr(
                     base_names, "__isabstractmethod__", False
                 )
@@ -115,6 +154,7 @@ def __eq__(self, other):

         """
         result = NotImplemented
+        # Only perform equivalence with similar class instances.
         if hasattr(other, "__class__") and other.__class__ is self.__class__:
             if _LENIENT(self.__eq__) or _LENIENT(self.equal):
                 # Perform "lenient" equality.
@@ -125,7 +165,22 @@
             else:
                 # Perform "strict" equality.
                 logger.debug("strict", extra=dict(cls=self.__class__.__name__))
-                result = super().__eq__(other)
+
+                def func(field):
+                    left = getattr(self, field)
+                    right = getattr(other, field)
+                    if self._is_attributes(field, left, right):
+                        result = self._compare_strict_attributes(left, right)
+                    else:
+                        result = left == right
+                    return result
+
+                # Note that, for strict we use "_fields" not "_members".
+                # The "circular" member does not participate in strict equivalence.
+                fields = filter(
+                    lambda field: field != "circular", self._fields
+                )
+                result = all([func(field) for field in fields])

         return result

@@ -134,6 +189,18 @@ def __lt__(self, other):
         # Support Python2 behaviour for a "<" operation involving a
         # "NoneType" operand.
         #
+        if not isinstance(other, BaseMetadata):
+            return NotImplemented
+
+        if (
+            self.__class__ is CoordMetadata
+            and other.__class__ is DimCoordMetadata
+        ) or (
+            self.__class__ is DimCoordMetadata
+            and other.__class__ is CoordMetadata
+        ):
+            other = self.from_metadata(other)
+
         if not isinstance(other, self.__class__):
             return NotImplemented

@@ -183,6 +250,7 @@ def _api_common(
             The result of the service operation to the parent service caller.

         """
+        # Ensure that we have similar class instances.
         if (
             not hasattr(other, "__class__")
             or other.__class__ is not self.__class__
@@ -219,8 +287,13 @@ def _combine(self, other):
             logger.debug("strict", extra=dict(cls=self.__class__.__name__))

             def func(field):
-                value = getattr(self, field)
-                return value if value == getattr(other, field) else None
+                left = getattr(self, field)
+                right = getattr(other, field)
+                if self._is_attributes(field, left, right):
+                    result = self._combine_strict_attributes(left, right)
+                else:
+                    result = left if left == right else None
+                return result

             # Note that, for strict we use "_fields" not "_members".
             values = [func(field) for field in self._fields]
@@ -265,8 +338,14 @@ def func(field):
     @staticmethod
     def _combine_lenient_attributes(left, right):
         """Leniently combine the dictionary members together."""
-        sleft = set(left.items())
-        sright = set(right.items())
+        # Copy the dictionaries.
+        left = deepcopy(left)
+        right = deepcopy(right)
+        # Use xxhash to perform an extremely fast non-cryptographic hash of
+        # each dictionary key rvalue, thus ensuring that the dictionary is
+        # completely hashable, as required by a set.
+        sleft = {(k, _hexdigest(v)) for k, v in left.items()}
+        sright = {(k, _hexdigest(v)) for k, v in right.items()}
         # Intersection of common items.
         common = sleft & sright
         # Items in sleft different from sright.
@@ -279,9 +358,27 @@
         [dsleft.pop(key) for key in keys]
         [dsright.pop(key) for key in keys]
         # Now bring the result together.
-        result = dict(common)
-        result.update(dsleft)
-        result.update(dsright)
+        result = {k: left[k] for k, _ in common}
+        result.update({k: left[k] for k in dsleft.keys()})
+        result.update({k: right[k] for k in dsright.keys()})
+
+        return result
+
+    @staticmethod
+    def _combine_strict_attributes(left, right):
+        """Perform strict combination of the dictionary members."""
+        # Copy the dictionaries.
+        left = deepcopy(left)
+        right = deepcopy(right)
+        # Use xxhash to perform an extremely fast non-cryptographic hash of
+        # each dictionary key rvalue, thus ensuring that the dictionary is
+        # completely hashable, as required by a set.
+        sleft = {(k, _hexdigest(v)) for k, v in left.items()}
+        sright = {(k, _hexdigest(v)) for k, v in right.items()}
+        # Intersection of common items.
+        common = sleft & sright
+        # Now bring the result together.
+        result = {k: left[k] for k, _ in common}

         return result

@@ -318,15 +415,25 @@ def func(field):
             return result

         # Note that, we use "_members" not "_fields".
-        result = all([func(field) for field in BaseMetadata._members])
+        # Lenient equality explicitly ignores the "var_name" member.
+        result = all(
+            [
+                func(field)
+                for field in BaseMetadata._members
+                if field != "var_name"
+            ]
+        )

         return result

     @staticmethod
     def _compare_lenient_attributes(left, right):
         """Perform lenient compare between the dictionary members."""
-        sleft = set(left.items())
-        sright = set(right.items())
+        # Use xxhash to perform an extremely fast non-cryptographic hash of
+        # each dictionary key rvalue, thus ensuring that the dictionary is
+        # completely hashable, as required by a set.
+        sleft = {(k, _hexdigest(v)) for k, v in left.items()}
+        sright = {(k, _hexdigest(v)) for k, v in right.items()}
         # Items in sleft different from sright.
         dsleft = dict(sleft - sright)
         # Items in sright different from sleft.
@@ -336,6 +443,17 @@

         return not bool(keys)

+    @staticmethod
+    def _compare_strict_attributes(left, right):
+        """Perform strict compare between the dictionary members."""
+        # Use xxhash to perform an extremely fast non-cryptographic hash of
+        # each dictionary key rvalue, thus ensuring that the dictionary is
+        # completely hashable, as required by a set.
+        sleft = {(k, _hexdigest(v)) for k, v in left.items()}
+        sright = {(k, _hexdigest(v)) for k, v in right.items()}
+
+        return sleft == sright
+
     def _difference(self, other):
         """Perform associated metadata member difference."""
         if _LENIENT(self.difference):
@@ -397,8 +515,11 @@ def func(field):
     @staticmethod
     def _difference_lenient_attributes(left, right):
         """Perform lenient difference between the dictionary members."""
-        sleft = set(left.items())
-        sright = set(right.items())
+        # Use xxhash to perform an extremely fast non-cryptographic hash of
+        # each dictionary key rvalue, thus ensuring that the dictionary is
+        # completely hashable, as required by a set.
+        sleft = {(k, _hexdigest(v)) for k, v in left.items()}
+        sright = {(k, _hexdigest(v)) for k, v in right.items()}
         # Items in sleft different from sright.
         dsleft = dict(sleft - sright)
         # Items in sright different from sleft.
@@ -412,6 +533,9 @@
         if not bool(dsleft) and not bool(dsright):
             result = None
         else:
+            # Replace hash-rvalue with original rvalue.
+            dsleft = {k: left[k] for k in dsleft.keys()}
+            dsright = {k: right[k] for k in dsright.keys()}
             result = (dsleft, dsright)

         return result
@@ -419,8 +543,11 @@
     @staticmethod
     def _difference_strict_attributes(left, right):
         """Perform strict difference between the dictionary members."""
-        sleft = set(left.items())
-        sright = set(right.items())
+        # Use xxhash to perform an extremely fast non-cryptographic hash of
+        # each dictionary key rvalue, thus ensuring that the dictionary is
+        # completely hashable, as required by a set.
+        sleft = {(k, _hexdigest(v)) for k, v in left.items()}
+        sright = {(k, _hexdigest(v)) for k, v in right.items()}
         # Items in sleft different from sright.
         dsleft = dict(sleft - sright)
         # Items in sright different from sleft.
@@ -429,6 +556,9 @@
         if not bool(dsleft) and not bool(dsright):
             result = None
         else:
+            # Replace hash-rvalue with original rvalue.
+            dsleft = {k: left[k] for k in dsleft.keys()}
+            dsright = {k: right[k] for k in dsright.keys()}
             result = (dsleft, dsright)

         return result
@@ -528,6 +658,20 @@ def equal(self, other, lenient=None):
         )
         return result

+    @classmethod
+    def from_metadata(cls, other):
+        result = None
+        if isinstance(other, BaseMetadata):
+            if other.__class__ is cls:
+                result = other
+            else:
+                kwargs = {field: None for field in cls._fields}
+                fields = set(cls._fields) & set(other._fields)
+                for field in fields:
+                    kwargs[field] = getattr(other, field)
+                result = cls(**kwargs)
+        return result
+
     def name(self, default=None, token=False):
         """
         Returns a string name representing the identity of the metadata.
@@ -735,6 +879,13 @@ class CoordMetadata(BaseMetadata):
     @wraps(BaseMetadata.__eq__, assigned=("__doc__",), updated=())
     @lenient_service
     def __eq__(self, other):
+        # Convert a DimCoordMetadata instance to a CoordMetadata instance.
+        if (
+            self.__class__ is CoordMetadata
+            and hasattr(other, "__class__")
+            and other.__class__ is DimCoordMetadata
+        ):
+            other = self.from_metadata(other)
         return super().__eq__(other)

     def _combine_lenient(self, other):
@@ -758,7 +909,7 @@ def func(field):
             return left if left == right else None

         # Note that, we use "_members" not "_fields".
-        values = [func(field) for field in self._members]
+        values = [func(field) for field in CoordMetadata._members]
         # Perform lenient combination of the other parent members.
         result = super()._combine_lenient(other)
         result.extend(values)
@@ -779,10 +930,11 @@ def _compare_lenient(self, other):
             Boolean.

         """
+        # Perform "strict" comparison for "coord_system" and "climatological".
         result = all(
             [
                 getattr(self, field) == getattr(other, field)
-                for field in self._members
+                for field in CoordMetadata._members
             ]
         )
         if result:
@@ -812,7 +964,7 @@ def func(field):
             return None if left == right else (left, right)

         # Note that, we use "_members" not "_fields".
-        values = [func(field) for field in self._members]
+        values = [func(field) for field in CoordMetadata._members]
         # Perform lenient difference of the other parent members.
         result = super()._difference_lenient(other)
         result.extend(values)
@@ -822,16 +974,37 @@
     @wraps(BaseMetadata.combine, assigned=("__doc__",), updated=())
     @lenient_service
     def combine(self, other, lenient=None):
+        # Convert a DimCoordMetadata instance to a CoordMetadata instance.
+        if (
+            self.__class__ is CoordMetadata
+            and hasattr(other, "__class__")
+            and other.__class__ is DimCoordMetadata
+        ):
+            other = self.from_metadata(other)
         return super().combine(other, lenient=lenient)

     @wraps(BaseMetadata.difference, assigned=("__doc__",), updated=())
     @lenient_service
     def difference(self, other, lenient=None):
+        # Convert a DimCoordMetadata instance to a CoordMetadata instance.
+        if (
+            self.__class__ is CoordMetadata
+            and hasattr(other, "__class__")
+            and other.__class__ is DimCoordMetadata
+        ):
+            other = self.from_metadata(other)
         return super().difference(other, lenient=lenient)

     @wraps(BaseMetadata.equal, assigned=("__doc__",), updated=())
     @lenient_service
     def equal(self, other, lenient=None):
+        # Convert a DimCoordMetadata instance to a CoordMetadata instance.
+        if (
+            self.__class__ is CoordMetadata
+            and hasattr(other, "__class__")
+            and other.__class__ is DimCoordMetadata
+        ):
+            other = self.from_metadata(other)
         return super().equal(other, lenient=lenient)

@@ -992,6 +1165,82 @@ def _check(item):
         return result


+class DimCoordMetadata(CoordMetadata):
+    """
+    Metadata container for a :class:`~iris.coords.DimCoord`.
+
+    """
+
+    # The "circular" member is stateful only, and does not participate
+    # in lenient/strict equivalence.
+    _members = ("circular",)
+
+    __slots__ = ()
+
+    @wraps(CoordMetadata.__eq__, assigned=("__doc__",), updated=())
+    @lenient_service
+    def __eq__(self, other):
+        # Convert a CoordMetadata instance to a DimCoordMetadata instance.
+        if hasattr(other, "__class__") and other.__class__ is CoordMetadata:
+            other = self.from_metadata(other)
+        return super().__eq__(other)
+
+    @wraps(CoordMetadata._combine_lenient, assigned=("__doc__",), updated=())
+    def _combine_lenient(self, other):
+        # Perform "strict" combination for "circular".
+        value = self.circular if self.circular == other.circular else None
+        # Perform lenient combination of the other parent members.
+        result = super()._combine_lenient(other)
+        result.append(value)
+
+        return result
+
+    @wraps(CoordMetadata._compare_lenient, assigned=("__doc__",), updated=())
+    def _compare_lenient(self, other):
+        # The "circular" member is not part of lenient equivalence.
+        return super()._compare_lenient(other)
+
+    @wraps(
+        CoordMetadata._difference_lenient, assigned=("__doc__",), updated=()
+    )
+    def _difference_lenient(self, other):
+        # Perform "strict" difference for "circular".
+        value = (
+            None
+            if self.circular == other.circular
+            else (self.circular, other.circular)
+        )
+        # Perform lenient difference of the other parent members.
+        result = super()._difference_lenient(other)
+        result.append(value)
+
+        return result
+
+    @wraps(CoordMetadata.combine, assigned=("__doc__",), updated=())
+    @lenient_service
+    def combine(self, other, lenient=None):
+        # Convert a CoordMetadata instance to a DimCoordMetadata instance.
+        if hasattr(other, "__class__") and other.__class__ is CoordMetadata:
+            other = self.from_metadata(other)
+        return super().combine(other, lenient=lenient)
+
+    @wraps(CoordMetadata.difference, assigned=("__doc__",), updated=())
+    @lenient_service
+    def difference(self, other, lenient=None):
+        # Convert a CoordMetadata instance to a DimCoordMetadata instance.
+        if hasattr(other, "__class__") and other.__class__ is CoordMetadata:
+            other = self.from_metadata(other)
+        return super().difference(other, lenient=lenient)
+
+    @wraps(CoordMetadata.equal, assigned=("__doc__",), updated=())
+    @lenient_service
+    def equal(self, other, lenient=None):
+        # Convert a CoordMetadata instance to a DimCoordMetadata instance.
+        if hasattr(other, "__class__") and other.__class__ is CoordMetadata:
+            other = self.from_metadata(other)
+        return super().equal(other, lenient=lenient)
+
+
 def metadata_manager_factory(cls, **kwargs):
     """
     A class instance factory function responsible for manufacturing
@@ -1137,6 +1386,7 @@ def values(self):
     CellMeasureMetadata.combine,
     CoordMetadata.combine,
     CubeMetadata.combine,
+    DimCoordMetadata.combine,
 )
@@ -1147,6 +1397,7 @@ def values(self):
     CellMeasureMetadata.difference,
     CoordMetadata.difference,
     CubeMetadata.difference,
+    DimCoordMetadata.difference,
 )
@@ -1162,6 +1413,8 @@ def values(self):
     CoordMetadata.equal,
     CubeMetadata.__eq__,
     CubeMetadata.equal,
+    DimCoordMetadata.__eq__,
+    DimCoordMetadata.equal,
 )
diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py
new file mode 100644
index 0000000000..7098eaa65e
--- /dev/null
+++ b/lib/iris/common/resolve.py
@@ -0,0 +1,1542 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+
+from collections import namedtuple
+from collections.abc import Iterable
+import logging
+
+from dask.array.core import broadcast_shapes
+import numpy as np
+
+from iris.common import LENIENT
+
+
+__all__ = ["Resolve"]
+
+
+# Configure the logger.
+logger = logging.getLogger(__name__)
+
+
+_AuxCoverage = namedtuple(
+    "AuxCoverage",
+    [
+        "cube",
+        "common_items_aux",
+        "common_items_scalar",
+        "local_items_aux",
+        "local_items_scalar",
+        "dims_common",
+        "dims_local",
+        "dims_free",
+    ],
+)
+
+_CategoryItems = namedtuple(
+    "CategoryItems", ["items_dim", "items_aux", "items_scalar"],
+)
+
+_DimCoverage = namedtuple(
+    "DimCoverage",
+    ["cube", "metadata", "coords", "dims_common", "dims_local", "dims_free"],
+)
+
+_Item = namedtuple("Item", ["metadata", "coord", "dims"])
+
+_PreparedFactory = namedtuple("PreparedFactory", ["container", "dependencies"])
+
+_PreparedItem = namedtuple(
+    "PreparedItem", ["metadata", "points", "bounds", "dims", "container"],
+)
+
+_PreparedMetadata = namedtuple("PreparedMetadata", ["combined", "src", "tgt"])
+
+
+class Resolve:
+    def __init__(self, lhs=None, rhs=None):
+        if lhs is not None or rhs is not None:
+            self(lhs, rhs)
+
+    def __call__(self, lhs, rhs):
+        self._init(lhs, rhs)
+
+        self._metadata_resolve()
+        self._metadata_coverage()
+
+        if self._debug:
+            self._debug_items(self.lhs_cube_category_local, title="LHS local")
+            self._debug_items(self.rhs_cube_category_local, title="RHS local")
+            self._debug_items(self.category_common, title="common")
+            logger.debug(f"map_rhs_to_lhs={self.map_rhs_to_lhs}")
+
+        self._metadata_mapping()
+        self._metadata_prepare()
+
+    def _as_compatible_cubes(self):
+        from iris.cube import Cube
+
+        src_cube = self._src_cube
+        tgt_cube = self._tgt_cube
+
+        # Use the mapping to calculate the new src cube shape.
+        new_src_shape = [1] * tgt_cube.ndim
+        for src_dim, tgt_dim in self.mapping.items():
+            new_src_shape[tgt_dim] = src_cube.shape[src_dim]
+        new_src_shape = tuple(new_src_shape)
+        dmsg = (
+            f"new src {self._src_cube_position} cube shape {new_src_shape}, "
+            f"actual shape {src_cube.shape}"
+        )
+        logger.debug(dmsg)
+
+        try:
+            # Determine whether the tgt cube shape and proposed new src
+            # cube shape will successfully broadcast together.
+            self._broadcast_shape = broadcast_shapes(
+                tgt_cube.shape, new_src_shape
+            )
+        except ValueError:
+            emsg = (
+                "Cannot resolve cubes, as a suitable transpose of the "
+                f"{self._src_cube_position} cube {src_cube.name()!r} "
+                f"will not broadcast with the {self._tgt_cube_position} cube "
+                f"{tgt_cube.name()!r}."
+            )
+            raise ValueError(emsg)
+
+        new_src_data = src_cube.core_data().copy()
+
+        # Use the mapping to determine the transpose sequence of
+        # src dimensions in increasing tgt dimension order.
+        order = [
+            src_dim
+            for src_dim, tgt_dim in sorted(
+                self.mapping.items(), key=lambda pair: pair[1]
+            )
+        ]
+
+        # Determine whether a transpose of the src cube is necessary.
+        if order != sorted(order):
+            new_src_data = new_src_data.transpose(order)
+            logger.debug(
+                f"transpose src {self._src_cube_position} cube with order {order}"
+            )
+
+        # Determine whether a reshape is necessary.
+        if new_src_shape != new_src_data.shape:
+            new_src_data = new_src_data.reshape(new_src_shape)
+            logger.debug(
+                f"reshape src {self._src_cube_position} cube to new shape {new_src_shape}"
+            )
+
+        # Create the new src cube.
+        new_src_cube = Cube(new_src_data)
+        new_src_cube.metadata = src_cube.metadata
+
+        def add_coord(coord, dim_coord=False):
+            src_dims = src_cube.coord_dims(coord)
+            tgt_dims = [self.mapping[src_dim] for src_dim in src_dims]
+            if dim_coord:
+                new_src_cube.add_dim_coord(coord, tgt_dims)
+            else:
+                new_src_cube.add_aux_coord(coord, tgt_dims)
+
+        # Add the dim coordinates to the new src cube.
+        for coord in src_cube.dim_coords:
+            add_coord(coord, dim_coord=True)
+
+        # Add the aux and scalar coordinates to the new src cube.
+        for coord in src_cube.aux_coords:
+            add_coord(coord)
+
+        # Add the aux factories to the new src cube.
+        for factory in src_cube.aux_factories:
+            new_src_cube.add_aux_factory(factory)
+
+        # Set the resolved cubes.
+        self._src_cube_resolved = new_src_cube
+        self._tgt_cube_resolved = tgt_cube
+
+    @staticmethod
+    def _aux_coverage(
+        cube,
+        cube_items_aux,
+        cube_items_scalar,
+        common_aux_metadata,
+        common_scalar_metadata,
+    ):
+        common_items_aux = []
+        common_items_scalar = []
+        local_items_aux = []
+        local_items_scalar = []
+        dims_common = []
+        dims_local = []
+        dims_free = set(range(cube.ndim))
+
+        for item in cube_items_aux:
+            [dims_free.discard(dim) for dim in item.dims]
+
+            if item.metadata in common_aux_metadata:
+                common_items_aux.append(item)
+                dims_common.extend(item.dims)
+            else:
+                local_items_aux.append(item)
+                dims_local.extend(item.dims)
+
+        for item in cube_items_scalar:
+            if item.metadata in common_scalar_metadata:
+                common_items_scalar.append(item)
+            else:
+                local_items_scalar.append(item)
+
+        return _AuxCoverage(
+            cube=cube,
+            common_items_aux=common_items_aux,
+            common_items_scalar=common_items_scalar,
+            local_items_aux=local_items_aux,
+            local_items_scalar=local_items_scalar,
+            dims_common=sorted(set(dims_common)),
+            dims_local=sorted(set(dims_local)),
+            dims_free=sorted(dims_free),
+        )
+
+    def _aux_mapping(self, src_coverage, tgt_coverage):
+        for tgt_item in tgt_coverage.common_items_aux:
+            # Search for a src aux metadata match.
+            tgt_metadata = tgt_item.metadata
+            src_items = tuple(
+                filter(
+                    lambda src_item: src_item.metadata == tgt_metadata,
+                    src_coverage.common_items_aux,
+                )
+            )
+            if src_items:
+                # Multiple matching src metadata must cover the same src
+                # dimensions.
+                src_dims = src_items[0].dims
+                if all(map(lambda item: item.dims == src_dims, src_items)):
+                    # Ensure src and tgt have equal rank.
+                    tgt_dims = tgt_item.dims
+                    if len(src_dims) == len(tgt_dims):
+                        for src_dim, tgt_dim in zip(src_dims, tgt_dims):
+                            self.mapping[src_dim] = tgt_dim
+                            logger.debug(f"{src_dim}->{tgt_dim}")
+            else:
+                # This situation can only occur due to a systemic internal
+                # failure to correctly identify common aux coordinate metadata
+                # coverage between the cubes.
+                emsg = (
+                    "Failed to map common aux coordinate metadata from "
+                    "source cube {!r} to target cube {!r}, using {!r} on "
+                    "target cube dimension{} {}."
+                )
+                raise ValueError(
+                    emsg.format(
+                        src_coverage.cube.name(),
+                        tgt_coverage.cube.name(),
+                        tgt_metadata,
+                        "s" if len(tgt_item.dims) > 1 else "",
+                        tgt_item.dims,
+                    )
+                )
+
+    @staticmethod
+    def _categorise_items(cube):
+        category = _CategoryItems(items_dim=[], items_aux=[], items_scalar=[])
+
+        # Categorise the dim coordinates of the cube.
+        for coord in cube.dim_coords:
+            item = _Item(
+                metadata=coord.metadata,
+                coord=coord,
+                dims=cube.coord_dims(coord),
+            )
+            category.items_dim.append(item)
+
+        # Categorise the aux and scalar coordinates of the cube.
+        for coord in cube.aux_coords:
+            dims = cube.coord_dims(coord)
+            item = _Item(metadata=coord.metadata, coord=coord, dims=dims)
+            if dims:
+                category.items_aux.append(item)
+            else:
+                category.items_scalar.append(item)
+
+        return category
+
+    @staticmethod
+    def _create_prepared_item(coord, dims, src=None, tgt=None):
+        if src is not None and tgt is not None:
+            combined = src.combine(tgt)
+        else:
+            combined = src or tgt
+        if not isinstance(dims, Iterable):
+            dims = (dims,)
+        prepared_metadata = _PreparedMetadata(
+            combined=combined, src=src, tgt=tgt
+        )
+        bounds = coord.bounds
+        result = _PreparedItem(
+            metadata=prepared_metadata,
+            points=coord.points.copy(),
+            bounds=bounds if bounds is None else bounds.copy(),
+            dims=dims,
+            container=type(coord),
+        )
+        return result
+
+    @property
+    def _debug(self):
+        result = False
+        level = logger.getEffectiveLevel()
+        if level != logging.NOTSET:
+            result = logging.DEBUG >= level
+        return result
+
+    @staticmethod
+    def _debug_items(items, title=None):
+        def _show(items, heading):
+            logger.debug(f"{title}{heading}:")
+            for item in items:
+                dmsg = f"metadata={item.metadata}, dims={item.dims}, bounds={item.coord.has_bounds()}"
+                logger.debug(dmsg)
+
+        title = f"{title} " if title else ""
+        _show(items.items_dim, "dim")
+        _show(items.items_aux, "aux")
+        _show(items.items_scalar, "scalar")
+
+    @staticmethod
+    def _dim_coverage(cube, cube_items_dim, common_dim_metadata):
+        ndim = cube.ndim
+        metadata = [None] * ndim
+        coords = [None] * ndim
+        dims_common = []
+        dims_local = []
+        dims_free = set(range(ndim))
+
+        for item in cube_items_dim:
+            (dim,) = item.dims
+            dims_free.discard(dim)
+            metadata[dim] = item.metadata
+            coords[dim] = item.coord
+            if item.metadata in common_dim_metadata:
+                dims_common.append(dim)
+            else:
+                dims_local.append(dim)
+
+        return _DimCoverage(
+            cube=cube,
+            metadata=metadata,
+            coords=coords,
+            dims_common=sorted(dims_common),
+            dims_local=sorted(dims_local),
+            dims_free=sorted(dims_free),
+        )
+
+    def _dim_mapping(self, src_coverage, tgt_coverage):
+        for tgt_dim in tgt_coverage.dims_common:
+            # Search for a src dim metadata match.
+            tgt_metadata = tgt_coverage.metadata[tgt_dim]
+            try:
+                src_dim = src_coverage.metadata.index(tgt_metadata)
+                self.mapping[src_dim] = tgt_dim
+                logger.debug(f"{src_dim}->{tgt_dim}")
+            except ValueError:
+                # This exception can only occur due to a systemic internal
+                # failure to correctly identify common dim coordinate metadata
+                # coverage between the cubes.
+                emsg = (
+                    "Failed to map common dim coordinate metadata from "
+                    "source cube {!r} to target cube {!r}, using {!r} on "
+                    "target cube dimension {}."
+                )
+                raise ValueError(
+                    emsg.format(
+                        src_coverage.cube.name(),
+                        tgt_coverage.cube.name(),
+                        tgt_metadata,
+                        tuple([tgt_dim]),
+                    )
+                )
+
+    def _free_mapping(
+        self,
+        src_dim_coverage,
+        tgt_dim_coverage,
+        src_aux_coverage,
+        tgt_aux_coverage,
+    ):
+        src_cube = src_dim_coverage.cube
+        tgt_cube = tgt_dim_coverage.cube
+        src_ndim = src_cube.ndim
+        tgt_ndim = tgt_cube.ndim
+
+        # mapping src to tgt, involving free dimensions on either the src/tgt.
+        free_mapping = {}
+
+        # Determine the src/tgt dimensions that are not mapped,
+        # and not covered by any metadata.
+        src_free = set(src_dim_coverage.dims_free) & set(
+            src_aux_coverage.dims_free
+        )
+        tgt_free = set(tgt_dim_coverage.dims_free) & set(
+            tgt_aux_coverage.dims_free
+        )
+
+        if src_free or tgt_free:
+            # Determine the src/tgt dimensions that are not mapped.
+            src_unmapped = set(range(src_ndim)) - set(self.mapping)
+            tgt_unmapped = set(range(tgt_ndim)) - set(self.mapping.values())
+
+            # Determine the src/tgt dimensions that are not mapped,
+            # but are covered by a src/tgt local coordinate.
+            src_unmapped_local = src_unmapped - src_free
+            tgt_unmapped_local = tgt_unmapped - tgt_free
+
+            src_shape = src_cube.shape
+            tgt_shape = tgt_cube.shape
+            src_max, tgt_max = max(src_shape), max(tgt_shape)
+
+            def assign_mapping(extent, unmapped_local_items, free_items=None):
+                result = None
+                if free_items is None:
+                    free_items = []
+                if extent == 1:
+                    if unmapped_local_items:
+                        result, _ = unmapped_local_items.pop(0)
+                    elif free_items:
+                        result, _ = free_items.pop(0)
+                else:
+
+                    def _filter(items):
+                        return list(
+                            filter(lambda item: item[1] == extent, items)
+                        )
+
+                    def _pop(item, items):
+                        result, _ = item
+                        index = items.index(item)
+                        items.pop(index)
+                        return result
+
+                    items = _filter(unmapped_local_items)
+                    if items:
+                        result = _pop(items[0], unmapped_local_items)
+                    else:
+                        items = _filter(free_items)
+                        if items:
+                            result = _pop(items[0], free_items)
+                return result
+
+            if src_free:
+                # Attempt to map src free dimensions to tgt unmapped local or free dimensions.
+                tgt_unmapped_local_items = [
+                    (dim, tgt_shape[dim]) for dim in tgt_unmapped_local
+                ]
+                tgt_free_items = [(dim, tgt_shape[dim]) for dim in tgt_free]
+
+                for src_dim in sorted(
+                    src_free, key=lambda dim: (src_max - src_shape[dim], dim)
+                ):
+                    tgt_dim = assign_mapping(
+                        src_shape[src_dim],
+                        tgt_unmapped_local_items,
+                        tgt_free_items,
+                    )
+                    if tgt_dim is None:
+                        # Failed to map the src free dimension
+                        # to a suitable tgt local/free dimension.
+                        dmsg = (
+                            f"failed to map src free dimension ({src_dim},) from "
+                            f"{self._src_cube_position} cube {src_cube.name()!r} to "
+                            f"{self._tgt_cube_position} cube {tgt_cube.name()!r}."
+                        )
+                        logger.debug(dmsg)
+                        break
+                    free_mapping[src_dim] = tgt_dim
+            else:
+                # Attempt to map tgt free dimensions to src unmapped local dimensions.
+                src_unmapped_local_items = [
+                    (dim, src_shape[dim]) for dim in src_unmapped_local
+                ]
+
+                for tgt_dim in sorted(
+                    tgt_free, key=lambda dim: (tgt_max - tgt_shape[dim], dim)
+                ):
+                    src_dim = assign_mapping(
+                        tgt_shape[tgt_dim], src_unmapped_local_items
+                    )
+                    if src_dim is not None:
+                        free_mapping[src_dim] = tgt_dim
+                        if not src_unmapped_local_items:
+                            # There are no more src unmapped local dimensions.
+                            break
+
+        # Determine whether there are still unmapped src dimensions.
+        src_unmapped = (
+            set(range(src_cube.ndim)) - set(self.mapping) - set(free_mapping)
+        )
+
+        if src_unmapped:
+            plural = "s" if len(src_unmapped) > 1 else ""
+            emsg = (
+                "Insufficient matching coordinate metadata to resolve cubes, "
+                f"cannot map dimension{plural} {tuple(sorted(src_unmapped))} "
+                f"of the {self._src_cube_position} cube {src_cube.name()!r} "
+                f"to the {self._tgt_cube_position} cube {tgt_cube.name()!r}."
+            )
+            raise ValueError(emsg)
+
+        # Update the mapping.
+        self.mapping.update(free_mapping)
+        logger.debug(f"mapping free dimensions gives, mapping={self.mapping}")
+
+    def _init(self, lhs, rhs):
+        from iris.cube import Cube
+
+        emsg = (
+            "{cls} requires {arg!r} argument to be a 'Cube', got {actual!r}."
+        )
+        clsname = self.__class__.__name__
+
+        if not isinstance(lhs, Cube):
+            raise TypeError(
+                emsg.format(cls=clsname, arg="LHS", actual=type(lhs))
+            )
+
+        if not isinstance(rhs, Cube):
+            raise TypeError(
+                emsg.format(cls=clsname, arg="RHS", actual=type(rhs))
+            )
+
+        # The LHS cube to be resolved into the resultant cube.
+        self.lhs_cube = lhs
+        # The RHS cube to be resolved into the resultant cube.
+        self.rhs_cube = rhs
+
+        # The transposed/reshaped (if required) LHS cube, which
+        # can be broadcast with RHS cube.
+        self.lhs_cube_resolved = None
+        # The transposed/reshaped (if required) RHS cube, which
+        # can be broadcast with LHS cube.
+        self.rhs_cube_resolved = None
+
+        # Categorised dim, aux and scalar coordinate items for LHS cube.
+        self.lhs_cube_category = None
+        # Categorised dim, aux and scalar coordinate items for RHS cube.
+        self.rhs_cube_category = None
+
+        # Categorised dim, aux and scalar coordinate items local to LHS cube only.
+        self.lhs_cube_category_local = _CategoryItems(
+            items_dim=[], items_aux=[], items_scalar=[]
+        )
+        # Categorised dim, aux and scalar coordinate items local to RHS cube only.
+        self.rhs_cube_category_local = _CategoryItems(
+            items_dim=[], items_aux=[], items_scalar=[]
+        )
+        # Categorised dim, aux and scalar coordinate items common to both
+        # LHS cube and RHS cube.
+        self.category_common = _CategoryItems(
+            items_dim=[], items_aux=[], items_scalar=[]
+        )
+
+        # Analysis of dim coordinates spanning LHS cube.
+        self.lhs_cube_dim_coverage = None
+        # Analysis of aux and scalar coordinates spanning LHS cube.
+        self.lhs_cube_aux_coverage = None
+        # Analysis of dim coordinates spanning RHS cube.
+        self.rhs_cube_dim_coverage = None
+        # Analysis of aux and scalar coordinates spanning RHS cube.
+        self.rhs_cube_aux_coverage = None
+
+        # Map common metadata from RHS cube to LHS cube if LHS-rank >= RHS-rank,
+        # otherwise map common metadata from LHS cube to RHS cube.
+        if self.lhs_cube.ndim >= self.rhs_cube.ndim:
+            self.map_rhs_to_lhs = True
+        else:
+            self.map_rhs_to_lhs = False
+
+        # Mapping of the dimensions between common metadata for the cubes,
+        # where the direction of the mapping is governed by map_rhs_to_lhs.
+        self.mapping = None
+
+        # Cache containing a list of dim, aux and scalar coordinates prepared
+        # and ready for creating and attaching to the resultant cube.
+        self.prepared_category = None
+
+        # Cache containing a list of aux factories prepared and ready for
+        # creating and attaching to the resultant cube.
+        self.prepared_factories = None
+
+        # The shape of the resultant resolved cube.
+        self._broadcast_shape = None
+
+    def _metadata_coverage(self):
+        # Determine the common dim coordinate metadata coverage.
+        common_dim_metadata = [
+            item.metadata for item in self.category_common.items_dim
+        ]
+
+        self.lhs_cube_dim_coverage = self._dim_coverage(
+            self.lhs_cube,
+            self.lhs_cube_category.items_dim,
+            common_dim_metadata,
+        )
+        self.rhs_cube_dim_coverage = self._dim_coverage(
+            self.rhs_cube,
+            self.rhs_cube_category.items_dim,
+            common_dim_metadata,
+        )
+
+        # Determine the common aux and scalar coordinate metadata coverage.
+        common_aux_metadata = [
+            item.metadata for item in self.category_common.items_aux
+        ]
+        common_scalar_metadata = [
+            item.metadata for item in self.category_common.items_scalar
+        ]
+
+        self.lhs_cube_aux_coverage = self._aux_coverage(
+            self.lhs_cube,
+            self.lhs_cube_category.items_aux,
+            self.lhs_cube_category.items_scalar,
+            common_aux_metadata,
+            common_scalar_metadata,
+        )
+        self.rhs_cube_aux_coverage = self._aux_coverage(
+            self.rhs_cube,
+            self.rhs_cube_category.items_aux,
+            self.rhs_cube_category.items_scalar,
+            common_aux_metadata,
+            common_scalar_metadata,
+        )
+
+    def _metadata_mapping(self):
+        # Initialise the state.
+        self.mapping = {}
+
+        # Map RHS cube to LHS cube, or smaller to larger cube rank.
+        if self.map_rhs_to_lhs:
+            src_cube = self.rhs_cube
+            src_dim_coverage = self.rhs_cube_dim_coverage
+            src_aux_coverage = self.rhs_cube_aux_coverage
+            tgt_cube = self.lhs_cube
+            tgt_dim_coverage = self.lhs_cube_dim_coverage
+            tgt_aux_coverage = self.lhs_cube_aux_coverage
+        else:
+            src_cube = self.lhs_cube
+            src_dim_coverage = self.lhs_cube_dim_coverage
+            src_aux_coverage = self.lhs_cube_aux_coverage
+            tgt_cube = self.rhs_cube
+            tgt_dim_coverage = self.rhs_cube_dim_coverage
+            tgt_aux_coverage = self.rhs_cube_aux_coverage
+
+        # Use the dim coordinates to fully map the
+        # src cube dimensions to the tgt cube dimensions.
+        self._dim_mapping(src_dim_coverage, tgt_dim_coverage)
+        logger.debug(
+            f"mapping common dim coordinates gives, mapping={self.mapping}"
+        )
+
+        # If necessary, use the aux coordinates to fully map the
+        # src cube dimensions to the tgt cube dimensions.
+        if not self.mapped:
+            self._aux_mapping(src_aux_coverage, tgt_aux_coverage)
+            logger.debug(
+                f"mapping common aux coordinates, mapping={self.mapping}"
+            )
+
+        if not self.mapped:
+            # Attempt to complete the mapping using src/tgt free dimensions.
+            # Note that this may not be possible, and may result in an
+            # exception.
+            self._free_mapping(
+                src_dim_coverage,
+                tgt_dim_coverage,
+                src_aux_coverage,
+                tgt_aux_coverage,
+            )
+
+        # Attempt to transpose/reshape the cubes into compatible broadcast shapes.
+        # Note that this may not be possible, and may result in an exception.
+        self._as_compatible_cubes()
+
+        # Given the resultant broadcast shape, determine whether the
+        # mapping needs to be reversed.
+        broadcast_flip = (
+            src_cube.ndim == tgt_cube.ndim
+            and self._tgt_cube_resolved.shape != self.shape
+            and self._src_cube_resolved.shape == self.shape
+        )
+
+        # Given the number of free dimensions, determine whether the
+        # mapping needs to be reversed.
+        src_free = set(src_dim_coverage.dims_free) & set(
+            src_aux_coverage.dims_free
+        )
+        tgt_free = set(tgt_dim_coverage.dims_free) & set(
+            tgt_aux_coverage.dims_free
+        )
+        free_flip = len(tgt_free) > len(src_free)
+
+        # Reverse the mapping direction.
+        if broadcast_flip or free_flip:
+            flip_mapping = {
+                tgt_dim: src_dim for src_dim, tgt_dim in self.mapping.items()
+            }
+            self.map_rhs_to_lhs = not self.map_rhs_to_lhs
+            dmsg = (
+                f"reversing the mapping from {self.mapping} to {flip_mapping}, "
+                f"now map_rhs_to_lhs={self.map_rhs_to_lhs}"
+            )
+            logger.debug(dmsg)
+            self.mapping = flip_mapping
+            # Now transpose/reshape the cubes into compatible broadcast
+            # cubes again, as the result is not necessarily commutative
+            # after reversing the mapping direction.
+            self._as_compatible_cubes()
+
+    def _metadata_prepare(self):
+        # Initialise the state.
+        self.prepared_category = _CategoryItems(
+            items_dim=[], items_aux=[], items_scalar=[]
+        )
+        self.prepared_factories = []
+
+        # Map RHS cube to LHS cube, or smaller to larger cube rank.
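
Backing up a step: the flip in _metadata_mapping above relies on the mapping being one-to-one, so reversing it is a single dictionary comprehension:

    mapping = {0: 2, 1: 0, 2: 1}  # src dim -> tgt dim
    flipped = {tgt: src for src, tgt in mapping.items()}
    print(flipped)  # -> {2: 0, 0: 1, 1: 2}
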
+ if self.map_rhs_to_lhs: + src_cube = self.rhs_cube + src_category_local = self.rhs_cube_category_local + src_dim_coverage = self.rhs_cube_dim_coverage + src_aux_coverage = self.rhs_cube_aux_coverage + tgt_cube = self.lhs_cube + tgt_category_local = self.lhs_cube_category_local + tgt_dim_coverage = self.lhs_cube_dim_coverage + tgt_aux_coverage = self.lhs_cube_aux_coverage + else: + src_cube = self.lhs_cube + src_category_local = self.lhs_cube_category_local + src_dim_coverage = self.lhs_cube_dim_coverage + src_aux_coverage = self.lhs_cube_aux_coverage + tgt_cube = self.rhs_cube + tgt_category_local = self.rhs_cube_category_local + tgt_dim_coverage = self.rhs_cube_dim_coverage + tgt_aux_coverage = self.rhs_cube_aux_coverage + + # Determine the resultant cube dim coordinate/s. + self._prepare_common_dim_payload(src_dim_coverage, tgt_dim_coverage) + + # Determine the resultant cube aux coordinate/s. + self._prepare_common_aux_payload( + src_aux_coverage.common_items_aux, # input + tgt_aux_coverage.common_items_aux, # input + self.prepared_category.items_aux, # output + ) + + # Determine the resultant cube scalar coordinate/s. + self._prepare_common_aux_payload( + src_aux_coverage.common_items_scalar, # input + tgt_aux_coverage.common_items_scalar, # input + self.prepared_category.items_scalar, # output + ignore_mismatch=True, + ) + + self._prepare_local_payload( + src_dim_coverage, + src_aux_coverage, + tgt_dim_coverage, + tgt_aux_coverage, + ) + + self._prepare_factory_payload( + tgt_cube, tgt_category_local, from_src=False + ) + self._prepare_factory_payload(src_cube, src_category_local) + + def _metadata_resolve(self): + """ + Categorise the coordinate metadata of the cubes into three distinct + groups; metadata from coordinates only available (local) on the LHS + cube, metadata from coordinates only available (local) on the RHS + cube, and metadata from coordinates common to both the LHS and RHS + cubes. + + This is only applicable to coordinates that are members of the + 'aux_coords' or 'dim_coords' of the participating cubes. + + """ + + # Determine the cube dim, aux and scalar coordinate items + # for each individual cube. + self.lhs_cube_category = self._categorise_items(self.lhs_cube) + self.rhs_cube_category = self._categorise_items(self.rhs_cube) + + def _categorise( + lhs_items, + rhs_items, + lhs_local_items, + rhs_local_items, + common_items, + ): + rhs_items_metadata = [item.metadata for item in rhs_items] + # Track common metadata here as a temporary convenience. + common_metadata = [] + + # Determine items local to the lhs, and shared items + # common to both lhs and rhs. + for item in lhs_items: + metadata = item.metadata + if metadata in rhs_items_metadata: + # The metadata is common between lhs and rhs. + if metadata not in common_metadata: + common_items.append(item) + common_metadata.append(metadata) + else: + # The metadata is local to the lhs. + lhs_local_items.append(item) + + # Determine items local to the rhs. + for item in rhs_items: + if item.metadata not in common_metadata: + rhs_local_items.append(item) + + # Determine local and common dim category items. + _categorise( + self.lhs_cube_category.items_dim, # input + self.rhs_cube_category.items_dim, # input + self.lhs_cube_category_local.items_dim, # output + self.rhs_cube_category_local.items_dim, # output + self.category_common.items_dim, # output + ) + + # Determine local and common aux category items. 
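
The same _categorise helper is reapplied to the aux and scalar items below. Setting aside the de-duplication of repeated metadata handled above, the partition reduces to this plain-Python sketch:

    def categorise(lhs, rhs):
        common = [metadata for metadata in lhs if metadata in rhs]
        lhs_local = [metadata for metadata in lhs if metadata not in common]
        rhs_local = [metadata for metadata in rhs if metadata not in common]
        return lhs_local, rhs_local, common

    print(categorise(["time", "latitude"], ["latitude", "height"]))
    # -> (['time'], ['height'], ['latitude'])
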
+ _categorise( + self.lhs_cube_category.items_aux, # input + self.rhs_cube_category.items_aux, # input + self.lhs_cube_category_local.items_aux, # output + self.rhs_cube_category_local.items_aux, # output + self.category_common.items_aux, # output + ) + + # Determine local and common scalar category items. + _categorise( + self.lhs_cube_category.items_scalar, # input + self.rhs_cube_category.items_scalar, # input + self.lhs_cube_category_local.items_scalar, # output + self.rhs_cube_category_local.items_scalar, # output + self.category_common.items_scalar, # output + ) + + # Sort the resultant categories by metadata name for consistency, + # in-place. + categories = ( + self.lhs_cube_category, + self.rhs_cube_category, + self.lhs_cube_category_local, + self.rhs_cube_category_local, + self.category_common, + ) + key_func = lambda item: item.metadata.name() + + for category in categories: + category.items_dim.sort(key=key_func) + category.items_aux.sort(key=key_func) + category.items_scalar.sort(key=key_func) + + def _prepare_common_aux_payload( + self, + src_common_items, + tgt_common_items, + prepared_items, + ignore_mismatch=None, + ): + from iris.coords import AuxCoord + + if ignore_mismatch is None: + # Configure ability to ignore coordinate points/bounds + # mismatches between common items. + ignore_mismatch = False + + for src_item in src_common_items: + src_metadata = src_item.metadata + tgt_items = tuple( + filter( + lambda tgt_item: tgt_item.metadata == src_metadata, + tgt_common_items, + ) + ) + if not tgt_items: + dmsg = ( + f"ignoring src {self._src_cube_position} cube aux coordinate " + f"{src_metadata}, does not match any common tgt " + f"{self._tgt_cube_position} cube aux coordinate metadata" + ) + logger.debug(dmsg) + elif len(tgt_items) > 1: + dmsg = ( + f"ignoring src {self._src_cube_position} cube aux coordinate " + f"{src_metadata}, matches multiple [{len(tgt_items)}] common " + f"tgt {self._tgt_cube_position} cube aux coordinate metadata" + ) + logger.debug(dmsg) + else: + (tgt_item,) = tgt_items + src_coord = src_item.coord + tgt_coord = tgt_item.coord + points, bounds = self._prepare_points_and_bounds( + src_coord, + tgt_coord, + src_item.dims, + tgt_item.dims, + ignore_mismatch=ignore_mismatch, + ) + if points is not None: + src_type = type(src_coord) + tgt_type = type(tgt_coord) + # Downcast to aux if there are mixed container types. + container = src_type if src_type is tgt_type else AuxCoord + prepared_metadata = _PreparedMetadata( + combined=src_metadata.combine(tgt_item.metadata), + src=src_metadata, + tgt=tgt_item.metadata, + ) + prepared_item = _PreparedItem( + metadata=prepared_metadata, + points=points.copy(), + bounds=bounds if bounds is None else bounds.copy(), + dims=tgt_item.dims, + container=container, + ) + prepared_items.append(prepared_item) + + def _prepare_common_dim_payload( + self, src_coverage, tgt_coverage, ignore_mismatch=None + ): + from iris.coords import DimCoord + + if ignore_mismatch is None: + # Configure ability to ignore coordinate points/bounds + # mismatches between common items. 
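
Note that _prepare_common_aux_payload above only pairs a src coordinate with a tgt coordinate when the metadata match is unambiguous; zero or multiple matches are logged and skipped. The pattern, standalone:

    def match_one(src, candidates):
        matches = [candidate for candidate in candidates if candidate == src]
        if len(matches) == 1:
            (match,) = matches
            return match
        return None  # missing or ambiguous: ignored, with a debug log

    print(match_one("latitude", ["latitude", "longitude"]))  # -> latitude
    print(match_one("time", ["latitude", "longitude"]))      # -> None
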
+ ignore_mismatch = False + + for src_dim in src_coverage.dims_common: + src_metadata = src_coverage.metadata[src_dim] + src_coord = src_coverage.coords[src_dim] + + tgt_dim = self.mapping[src_dim] + tgt_metadata = tgt_coverage.metadata[tgt_dim] + tgt_coord = tgt_coverage.coords[tgt_dim] + + points, bounds = self._prepare_points_and_bounds( + src_coord, + tgt_coord, + src_dim, + tgt_dim, + ignore_mismatch=ignore_mismatch, + ) + + if points is not None: + prepared_metadata = _PreparedMetadata( + combined=src_metadata.combine(tgt_metadata), + src=src_metadata, + tgt=tgt_metadata, + ) + prepared_item = _PreparedItem( + metadata=prepared_metadata, + points=points.copy(), + bounds=bounds if bounds is None else bounds.copy(), + dims=(tgt_dim,), + container=DimCoord, + ) + self.prepared_category.items_dim.append(prepared_item) + + def _prepare_factory_payload(self, cube, category_local, from_src=True): + def _get_prepared_item(metadata, from_src=True, from_local=False): + result = None + if from_local: + category = category_local + match = lambda item: item.metadata == metadata + else: + category = self.prepared_category + if from_src: + match = lambda item: item.metadata.src == metadata + else: + match = lambda item: item.metadata.tgt == metadata + for member in category._fields: + category_items = getattr(category, member) + matched_items = tuple(filter(match, category_items)) + if matched_items: + if len(matched_items) > 1: + dmsg = ( + f"ignoring factory dependency {metadata}, multiple {'src' if from_src else 'tgt'} " + f"{'local' if from_local else 'prepared'} metadata matches" + ) + logger.debug(dmsg) + else: + (item,) = matched_items + if from_local: + src = tgt = None + if from_src: + src = item.metadata + dims = tuple( + [self.mapping[dim] for dim in item.dims] + ) + else: + tgt = item.metadata + dims = item.dims + result = self._create_prepared_item( + item.coord, dims, src=src, tgt=tgt + ) + getattr(self.prepared_category, member).append( + result + ) + else: + result = item + break + return result + + for factory in cube.aux_factories: + container = type(factory) + dependencies = {} + prepared_item = None + + if tuple( + filter( + lambda item: item.container is container, + self.prepared_factories, + ) + ): + # debug: skipping, factory already exists + dmsg = ( + f"ignoring {'src' if from_src else 'tgt'} {container}, " + f"a similar factory has already been prepared" + ) + logger.debug(dmsg) + continue + + for ( + dependency_name, + dependency_coord, + ) in factory.dependencies.items(): + metadata = dependency_coord.metadata + prepared_item = _get_prepared_item(metadata, from_src=from_src) + if prepared_item is None: + prepared_item = _get_prepared_item( + metadata, from_src=from_src, from_local=True + ) + if prepared_item is None: + dmsg = f"cannot find matching {metadata} for {container} dependency {dependency_name}" + logger.debug(dmsg) + break + dependencies[dependency_name] = prepared_item.metadata + + if prepared_item is not None: + prepared_factory = _PreparedFactory( + container=container, dependencies=dependencies + ) + self.prepared_factories.append(prepared_factory) + else: + dmsg = f"ignoring {'src' if from_src else 'tgt'} {container}, cannot find all dependencies" + logger.debug(dmsg) + + def _prepare_local_payload_aux(self, src_aux_coverage, tgt_aux_coverage): + # Determine whether there are tgt dimensions not mapped to by an + # associated src dimension, and thus may be covered by any local + # tgt aux coordinates. 
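
That computation, shown first below, is just a set difference between all tgt dimensions and those already claimed by the mapping:

    tgt_ndim = 4
    mapping = {0: 1, 1: 2}  # src dim -> tgt dim
    extra_tgt_dims = set(range(tgt_ndim)) - set(mapping.values())
    print(extra_tgt_dims)  # -> {0, 3}: tgt dims no src dim maps onto
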
+ extra_tgt_dims = set(range(tgt_aux_coverage.cube.ndim)) - set( + self.mapping.values() + ) + + if LENIENT["maths"]: + mapped_src_dims = set(self.mapping.keys()) + mapped_tgt_dims = set(self.mapping.values()) + + # Add local src aux coordinates. + for item in src_aux_coverage.local_items_aux: + if all([dim in mapped_src_dims for dim in item.dims]): + tgt_dims = tuple([self.mapping[dim] for dim in item.dims]) + prepared_item = self._create_prepared_item( + item.coord, tgt_dims, src=item.metadata + ) + self.prepared_category.items_aux.append(prepared_item) + else: + dmsg = ( + f"ignoring local src {self._src_cube_position} cube " + f"aux coordinate {item.metadata}, as not all src " + f"dimensions {item.dims} are mapped" + ) + logger.debug(dmsg) + else: + # For strict maths, only local tgt aux coordinates covering + # the extra dimensions of the tgt cube may be added. + mapped_tgt_dims = set() + + # Add local tgt aux coordinates. + for item in tgt_aux_coverage.local_items_aux: + tgt_dims = item.dims + if all([dim in mapped_tgt_dims for dim in tgt_dims]) or any( + [dim in extra_tgt_dims for dim in tgt_dims] + ): + prepared_item = self._create_prepared_item( + item.coord, tgt_dims, tgt=item.metadata + ) + self.prepared_category.items_aux.append(prepared_item) + else: + dmsg = ( + f"ignoring local tgt {self._tgt_cube_position} cube " + f"aux coordinate {item.metadata}, as not all tgt " + f"dimensions {tgt_dims} are mapped" + ) + logger.debug(dmsg) + + def _prepare_local_payload_dim(self, src_dim_coverage, tgt_dim_coverage): + mapped_tgt_dims = self.mapping.values() + + # Determine whether there are tgt dimensions not mapped to by an + # associated src dimension, and thus may be covered by any local + # tgt dim coordinates. + extra_tgt_dims = set(range(tgt_dim_coverage.cube.ndim)) - set( + mapped_tgt_dims + ) + + if LENIENT["maths"]: + tgt_dims_conflict = set() + + # Add local src dim coordinates. + for src_dim in src_dim_coverage.dims_local: + tgt_dim = self.mapping[src_dim] + # Only add the local src dim coordinate iff there is no + # associated local tgt dim coordinate. + if tgt_dim not in tgt_dim_coverage.dims_local: + metadata = src_dim_coverage.metadata[src_dim] + coord = src_dim_coverage.coords[src_dim] + prepared_item = self._create_prepared_item( + coord, tgt_dim, src=metadata + ) + self.prepared_category.items_dim.append(prepared_item) + else: + tgt_dims_conflict.add(tgt_dim) + if self._debug: + src_metadata = src_dim_coverage.metadata[src_dim] + tgt_metadata = tgt_dim_coverage.metadata[tgt_dim] + dmsg = ( + f"ignoring local src {self._src_cube_position} cube " + f"dim coordinate {src_metadata}, as conflicts with " + f"tgt {self._tgt_cube_position} cube dim coordinate " + f"{tgt_metadata}, mapping ({src_dim},)->({tgt_dim},)" + ) + logger.debug(dmsg) + + # Determine whether there are any tgt dims free to be mapped + # by an available local tgt dim coordinate. + tgt_dims_unmapped = ( + set(tgt_dim_coverage.dims_local) - tgt_dims_conflict + ) + else: + # For strict maths, only local tgt dim coordinates covering + # the extra dimensions of the tgt cube may be added. + tgt_dims_unmapped = extra_tgt_dims + + # Add local tgt dim coordinates. 
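
In lenient mode the src-side rule above is "the tgt cube wins on conflict": a local src dim coordinate is only carried over when its mapped tgt dimension has no local dim coordinate of its own. Schematically, with hypothetical dimensions:

    mapping = {0: 0, 1: 1}              # src dim -> tgt dim
    src_local, tgt_local = {0, 1}, {1}  # dims carrying local dim coordinates
    kept = {src for src in src_local if mapping[src] not in tgt_local}
    conflicts = {mapping[src] for src in src_local} & tgt_local
    print(kept, conflicts)  # -> {0} {1}: src dim 0 is kept, tgt dim 1 wins
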
+ for tgt_dim in tgt_dims_unmapped: + if tgt_dim in mapped_tgt_dims or tgt_dim in extra_tgt_dims: + metadata = tgt_dim_coverage.metadata[tgt_dim] + if metadata is not None: + coord = tgt_dim_coverage.coords[tgt_dim] + prepared_item = self._create_prepared_item( + coord, tgt_dim, tgt=metadata + ) + self.prepared_category.items_dim.append(prepared_item) + + def _prepare_local_payload_scalar( + self, src_aux_coverage, tgt_aux_coverage + ): + # Add all local tgt scalar coordinates iff the src cube is a + # scalar cube with no local src scalar coordinates. + # Only for strict maths. + src_scalar_cube = ( + not LENIENT["maths"] + and src_aux_coverage.cube.ndim == 0 + and len(src_aux_coverage.local_items_scalar) == 0 + ) + + if src_scalar_cube or LENIENT["maths"]: + # Add any local src scalar coordinates, if available. + for item in src_aux_coverage.local_items_scalar: + prepared_item = self._create_prepared_item( + item.coord, item.dims, src=item.metadata + ) + self.prepared_category.items_scalar.append(prepared_item) + + # Add any local tgt scalar coordinates, if available. + for item in tgt_aux_coverage.local_items_scalar: + prepared_item = self._create_prepared_item( + item.coord, item.dims, tgt=item.metadata + ) + self.prepared_category.items_scalar.append(prepared_item) + + def _prepare_local_payload( + self, + src_dim_coverage, + src_aux_coverage, + tgt_dim_coverage, + tgt_aux_coverage, + ): + # Add local src/tgt dim coordinates. + self._prepare_local_payload_dim(src_dim_coverage, tgt_dim_coverage) + + # Add local src/tgt aux coordinates. + self._prepare_local_payload_aux(src_aux_coverage, tgt_aux_coverage) + + # Add local src/tgt scalar coordinates. + self._prepare_local_payload_scalar(src_aux_coverage, tgt_aux_coverage) + + def _prepare_points_and_bounds( + self, src_coord, tgt_coord, src_dims, tgt_dims, ignore_mismatch=None + ): + from iris.util import array_equal + + if ignore_mismatch is None: + # Configure ability to ignore coordinate points/bounds + # mismatches between common items. + ignore_mismatch = False + + points, bounds = None, None + + if not isinstance(src_dims, Iterable): + src_dims = (src_dims,) + + if not isinstance(tgt_dims, Iterable): + tgt_dims = (tgt_dims,) + + # Deal with coordinates that have been sliced. + if src_coord.ndim != tgt_coord.ndim: + if tgt_coord.ndim > src_coord.ndim: + # Use the tgt coordinate points/bounds. + points = tgt_coord.points + bounds = tgt_coord.bounds + else: + # Use the src coordinate points/bounds. + points = src_coord.points + bounds = src_coord.bounds + + # Deal with coordinates spanning broadcast dimensions. + if ( + points is None + and bounds is None + and src_coord.shape != tgt_coord.shape + ): + # Check whether the src coordinate is broadcasting. + dims = tuple([self.mapping[dim] for dim in src_dims]) + src_shape_broadcast = tuple([self.shape[dim] for dim in dims]) + src_cube_shape = self._src_cube.shape + src_shape = tuple([src_cube_shape[dim] for dim in src_dims]) + src_broadcasting = src_shape != src_shape_broadcast + + # Check whether the tgt coordinate is broadcasting. 
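
The src-side check just above (mirrored for the tgt immediately below) compares the coordinate's extents in its own cube against the extents of the same dimensions in the broadcast shape. A simplified sketch, assuming the spanned dims are already expressed in result space:

    shape = (4, 3, 2)       # resolved broadcast shape
    cube_shape = (1, 3, 2)  # the coordinate's parent cube
    dims = (0,)             # result dims spanned by the coordinate
    broadcasting = (
        tuple(cube_shape[dim] for dim in dims)
        != tuple(shape[dim] for dim in dims)
    )
    print(broadcasting)  # -> True: extent 1 is being broadcast up to 4
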
+ tgt_shape_broadcast = tuple([self.shape[dim] for dim in tgt_dims]) + tgt_cube_shape = self._tgt_cube.shape + tgt_shape = tuple([tgt_cube_shape[dim] for dim in tgt_dims]) + tgt_broadcasting = tgt_shape != tgt_shape_broadcast + + if src_broadcasting and tgt_broadcasting: + emsg = ( + f"Cannot broadcast the coordinate {src_coord.name()!r} on " + f"{self._src_cube_position} cube {self._src_cube.name()!r} and " + f"coordinate {tgt_coord.name()!r} on " + f"{self._tgt_cube_position} cube {self._tgt_cube.name()!r} to " + f"broadcast shape {tgt_shape_broadcast}." + ) + raise ValueError(emsg) + elif src_broadcasting: + # Use the tgt coordinate points/bounds. + points = tgt_coord.points + bounds = tgt_coord.bounds + elif tgt_broadcasting: + # Use the src coordinate points/bounds. + points = src_coord.points + bounds = src_coord.bounds + + if points is None and bounds is None: + # Note that, this also ensures shape equality. + eq_points = array_equal( + src_coord.points, tgt_coord.points, withnans=True + ) + if eq_points: + points = src_coord.points + src_has_bounds = src_coord.has_bounds() + tgt_has_bounds = tgt_coord.has_bounds() + + if src_has_bounds and tgt_has_bounds: + src_bounds = src_coord.bounds + eq_bounds = array_equal( + src_bounds, tgt_coord.bounds, withnans=True + ) + + if eq_bounds: + bounds = src_bounds + else: + if LENIENT["maths"] and ignore_mismatch: + # For lenient, ignore coordinate with mis-matched bounds. + dmsg = ( + f"ignoring src {self._src_cube_position} cube " + f"{src_coord.metadata}, unequal bounds with " + f"tgt {self._tgt_cube_position} cube, " + f"{src_dims}->{tgt_dims}" + ) + logger.debug(dmsg) + else: + emsg = ( + f"Coordinate {src_coord.name()!r} has different bounds for the " + f"LHS cube {self.lhs_cube.name()!r} and " + f"RHS cube {self.rhs_cube.name()!r}." + ) + raise ValueError(emsg) + else: + # For lenient, use either of the coordinate bounds, if they exist. + if LENIENT["maths"]: + if src_has_bounds: + dmsg = ( + f"using src {self._src_cube_position} cube " + f"{src_coord.metadata} bounds, tgt has no bounds" + ) + logger.debug(dmsg) + bounds = src_coord.bounds + else: + dmsg = ( + f"using tgt {self._tgt_cube_position} cube " + f"{tgt_coord.metadata} bounds, src has no bounds" + ) + logger.debug(dmsg) + bounds = tgt_coord.bounds + else: + # For strict, both coordinates must have bounds, or both + # coordinates must not have bounds. + if src_has_bounds: + emsg = ( + f"Coordinate {src_coord.name()!r} has bounds for the " + f"{self._src_cube_position} cube {self._src_cube.name()!r}, " + f"but not the {self._tgt_cube_position} cube {self._tgt_cube.name()!r}." + ) + raise ValueError(emsg) + if tgt_has_bounds: + emsg = ( + f"Coordinate {tgt_coord.name()!r} has bounds for the " + f"{self._tgt_cube_position} cube {self._tgt_cube.name()!r}, " + f"but not the {self._src_cube_position} cube {self._src_cube.name()!r}." + ) + raise ValueError(emsg) + else: + if LENIENT["maths"] and ignore_mismatch: + # For lenient, ignore coordinate with mis-matched points. + dmsg = ( + f"ignoring src {self._src_cube_position} cube " + f"{src_coord.metadata}, unequal points with tgt " + f"{src_dims}->{tgt_dims}" + ) + logger.debug(dmsg) + else: + emsg = ( + f"Coordinate {src_coord.name()!r} has different points for the " + f"LHS cube {self.lhs_cube.name()!r} and " + f"RHS cube {self.rhs_cube.name()!r}." 
+ ) + raise ValueError(emsg) + + return points, bounds + + @property + def _src_cube(self): + if self.map_rhs_to_lhs: + result = self.rhs_cube + else: + result = self.lhs_cube + return result + + @property + def _src_cube_position(self): + if self.map_rhs_to_lhs: + result = "RHS" + else: + result = "LHS" + return result + + @property + def _src_cube_resolved(self): + if self.map_rhs_to_lhs: + result = self.rhs_cube_resolved + else: + result = self.lhs_cube_resolved + return result + + @_src_cube_resolved.setter + def _src_cube_resolved(self, cube): + if self.map_rhs_to_lhs: + self.rhs_cube_resolved = cube + else: + self.lhs_cube_resolved = cube + + @property + def _tgt_cube(self): + if self.map_rhs_to_lhs: + result = self.lhs_cube + else: + result = self.rhs_cube + return result + + @property + def _tgt_cube_position(self): + if self.map_rhs_to_lhs: + result = "LHS" + else: + result = "RHS" + return result + + @property + def _tgt_cube_resolved(self): + if self.map_rhs_to_lhs: + result = self.lhs_cube_resolved + else: + result = self.rhs_cube_resolved + return result + + @_tgt_cube_resolved.setter + def _tgt_cube_resolved(self, cube): + if self.map_rhs_to_lhs: + self.lhs_cube_resolved = cube + else: + self.rhs_cube_resolved = cube + + def _tgt_cube_prepare(self, data): + cube = self._tgt_cube + + # Replace existing tgt cube data with the provided data. + cube.data = data + + # Clear the aux factories. + for factory in cube.aux_factories: + cube.remove_aux_factory(factory) + + # Clear the cube coordinates. + for coord in cube.coords(): + cube.remove_coord(coord) + + # Clear the cube cell measures. + for cm in cube.cell_measures(): + cube.remove_cell_measure(cm) + + # Clear the ancillary variables. + for av in cube.ancillary_variables(): + cube.remove_ancillary_variable(av) + + def cube(self, data, in_place=False): + from iris.cube import Cube + + expected_shape = self.shape + + # Ensure that we have been provided with candidate cubes, which are + # now resolved and metadata is prepared, ready and awaiting the + # resultant resolved cube. + if expected_shape is None: + emsg = ( + "Cannot resolve resultant cube, as no candidate cubes have " + "been provided." + ) + raise ValueError(emsg) + + if not hasattr(data, "shape"): + data = np.asanyarray(data) + + # Ensure that the shape of the provided data is the expected + # shape of the resultant resolved cube. + if data.shape != expected_shape: + emsg = ( + "Cannot resolve resultant cube, as the provided data must " + f"have shape {expected_shape}, got data shape {data.shape}." + ) + raise ValueError(emsg) + + if in_place: + result = self._tgt_cube + + if result.shape != expected_shape: + emsg = ( + "Cannot resolve resultant cube in-place, as the " + f"{self._tgt_cube_position} tgt cube {result.name()!r} " + f"requires data with shape {result.shape}, got data " + f"shape {data.shape}. Suggest not performing this " + "operation in-place." + ) + raise ValueError(emsg) + + # Prepare target cube for in-place population with the prepared + # metadata content and the provided data. + self._tgt_cube_prepare(data) + else: + # Create the resultant resolved cube with provided data. + result = Cube(data) + + # Add the combined cube metadata from both the candidate cubes. + result.metadata = self.lhs_cube.metadata.combine( + self.rhs_cube.metadata + ) + + # Add the prepared dim coordinates. 
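
The loops below rebuild the result generically from the prepared payload. By hand, with real Iris objects, the same shape of construction looks like this (coordinate names are hypothetical):

    import numpy as np
    from iris.coords import AuxCoord, DimCoord
    from iris.cube import Cube

    result = Cube(np.arange(6.0).reshape(2, 3))
    result.add_dim_coord(DimCoord([0, 1], long_name="y"), 0)
    result.add_aux_coord(AuxCoord([10, 20, 30], long_name="x"), 1)
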
+ for item in self.prepared_category.items_dim: + coord = item.container(item.points, bounds=item.bounds) + coord.metadata = item.metadata.combined + result.add_dim_coord(coord, item.dims) + + # Add the prepared aux and scalar coordinates. + prepared_aux_coords = ( + self.prepared_category.items_aux + + self.prepared_category.items_scalar + ) + for item in prepared_aux_coords: + coord = item.container(item.points, bounds=item.bounds) + coord.metadata = item.metadata.combined + try: + result.add_aux_coord(coord, item.dims) + except ValueError as err: + scalar = dims = "" + if item.dims: + plural = "s" if len(item.dims) > 1 else "" + dims = f" with tgt dim{plural} {item.dims}" + else: + scalar = "scalar " + dmsg = ( + f"ignoring prepared {scalar}coordinate " + f"{coord.metadata}{dims}, got {err!r}" + ) + logger.debug(dmsg) + + # Add the prepared aux factories. + for prepared_factory in self.prepared_factories: + dependencies = dict() + for ( + dependency_name, + prepared_metadata, + ) in prepared_factory.dependencies.items(): + coord = result.coord(prepared_metadata.combined) + dependencies[dependency_name] = coord + factory = prepared_factory.container(**dependencies) + result.add_aux_factory(factory) + + return result + + @property + def mapped(self): + """ + Returns the state of whether all src cube dimensions have been + associated with relevant tgt cube dimensions. + + """ + return self._src_cube.ndim == len(self.mapping) + + @property + def shape(self): + """Returns the shape of the resultant resolved cube.""" + return getattr(self, "_broadcast_shape", None) diff --git a/lib/iris/coords.py b/lib/iris/coords.py index d50234c1b6..8fbe1abf56 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -31,6 +31,7 @@ CFVariableMixin, CellMeasureMetadata, CoordMetadata, + DimCoordMetadata, metadata_manager_factory, ) import iris.exceptions @@ -1335,7 +1336,8 @@ def __init__( """ # Configure the metadata manager. - self._metadata_manager = metadata_manager_factory(CoordMetadata) + if not hasattr(self, "_metadata_manager"): + self._metadata_manager = metadata_manager_factory(CoordMetadata) super().__init__( values=points, @@ -1834,8 +1836,9 @@ def is_compatible(self, other, ignore=None): Args: * other: - An instance of :class:`iris.coords.Coord` or - :class:`iris.common.CoordMetadata`. + An instance of :class:`iris.coords.Coord`, + :class:`iris.common.CoordMetadata` or + :class:`iris.common.DimCoordMetadata`. * ignore: A single attribute key or iterable of attribute keys to ignore when comparing the coordinates. Default is None. To ignore all @@ -2334,6 +2337,9 @@ def __init__( read-only points and bounds. """ + # Configure the metadata manager. + self._metadata_manager = metadata_manager_factory(DimCoordMetadata) + super().__init__( points, standard_name=standard_name, @@ -2347,7 +2353,7 @@ def __init__( ) #: Whether the coordinate wraps by ``coord.units.modulus``. - self.circular = bool(circular) + self.circular = circular def __deepcopy__(self, memo): """ @@ -2363,6 +2369,14 @@ def __deepcopy__(self, memo): new_coord._bounds_dm.data.flags.writeable = False return new_coord + @property + def circular(self): + return self._metadata_manager.circular + + @circular.setter + def circular(self, circular): + self._metadata_manager.circular = bool(circular) + def copy(self, points=None, bounds=None): new_coord = super().copy(points=points, bounds=bounds) # Make the arrays read-only. 
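
For the coords.py change above: moving "circular" onto the metadata manager means the flag travels with the rest of the coordinate metadata, while the property pair coerces to bool exactly once on the way in. A stripped-down sketch of the delegation pattern, with placeholder class names:

    class Manager:
        circular = False

    class CoordLike:
        def __init__(self):
            self._metadata_manager = Manager()

        @property
        def circular(self):
            return self._metadata_manager.circular

        @circular.setter
        def circular(self, value):
            # Coerce to bool once, on assignment.
            self._metadata_manager.circular = bool(value)

    coord = CoordLike()
    coord.circular = 1
    print(coord.circular)  # -> True
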
@@ -2372,13 +2386,13 @@ def copy(self, points=None, bounds=None): return new_coord def __eq__(self, other): - # TODO investigate equality of AuxCoord and DimCoord if circular is - # False. result = NotImplemented if isinstance(other, DimCoord): - result = ( - Coord.__eq__(self, other) and self.circular == other.circular - ) + # The "circular" member participates in DimCoord to DimCoord + # equivalence. We require to do this explicitly here + # as the "circular" member does NOT participate in + # DimCoordMetadata to DimCoordMetadata equivalence. + result = self.circular == other.circular and super().__eq__(other) return result # The __ne__ operator from Coord implements the not __eq__ method. diff --git a/lib/iris/cube.py b/lib/iris/cube.py index a4b5997ec8..6f916b989e 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -42,6 +42,7 @@ CFVariableMixin, CoordMetadata, CubeMetadata, + DimCoordMetadata, metadata_manager_factory, ) import iris.coord_systems @@ -1543,8 +1544,9 @@ def coords( (b) a coordinate instance with metadata equal to that of the desired coordinates. Accepts either a :class:`iris.coords.DimCoord`, :class:`iris.coords.AuxCoord`, - :class:`iris.aux_factory.AuxCoordFactory` - or :class:`iris.common.CoordMetadata`. + :class:`iris.aux_factory.AuxCoordFactory`, + :class:`iris.common.CoordMetadata` or + :class:`iris.common.DimCoordMetadata`. * standard_name The CF standard name of the desired coordinate. If None, does not check for standard name. @@ -1662,7 +1664,10 @@ def attr_filter(coord_): ] if coord is not None: - if isinstance(coord, CoordMetadata): + if hasattr(coord, "__class__") and coord.__class__ in ( + CoordMetadata, + DimCoordMetadata, + ): target_metadata = coord else: target_metadata = coord.metadata diff --git a/lib/iris/etc/logging.yaml b/lib/iris/etc/logging.yaml index c651de95b8..5671916ff9 100644 --- a/lib/iris/etc/logging.yaml +++ b/lib/iris/etc/logging.yaml @@ -4,15 +4,21 @@ formatters: basic: format: "%(asctime)s %(name)s %(levelname)s - %(message)s" datefmt: "%d-%m-%Y %H:%M:%S" - basic-func: + basic-cls-func: format: "%(asctime)s %(name)s %(levelname)s - %(message)s [%(cls)s.%(funcName)s]" datefmt: "%d-%m-%Y %H:%M:%S" + basic-func: + format: "%(asctime)s %(name)s %(levelname)s - %(message)s [%(funcName)s]" handlers: console: class: logging.StreamHandler formatter: basic stream: ext://sys.stdout + console-cls-func: + class: logging.StreamHandler + formatter: basic-cls-func + stream: ext://sys.stdout console-func: class: logging.StreamHandler formatter: basic-func @@ -20,6 +26,10 @@ handlers: loggers: iris.common.metadata: + level: INFO + handlers: [console-cls-func] + propagate: no + iris.common.resolve: level: INFO handlers: [console-func] propagate: no diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index 9132e16680..b5b80a97ef 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -21,6 +21,7 @@ import codecs import collections +from collections.abc import Mapping import contextlib import datetime import difflib @@ -1004,6 +1005,78 @@ def assertArrayShapeStats(self, result, shape, mean, std_dev, rtol=1e-6): self.assertArrayAllClose(result.data.mean(), mean, rtol=rtol) self.assertArrayAllClose(result.data.std(), std_dev, rtol=rtol) + def assertDictEqual(self, lhs, rhs, msg=None): + """ + This method overrides unittest.TestCase.assertDictEqual (new in Python3.1) + in order to cope with dictionary comparison where the value of a key may + be a numpy array. 
+
+        """
+        if not isinstance(lhs, Mapping):
+            emsg = (
+                f"Provided LHS argument is not a 'Mapping', got {type(lhs)}."
+            )
+            self.fail(emsg)
+
+        if not isinstance(rhs, Mapping):
+            emsg = (
+                f"Provided RHS argument is not a 'Mapping', got {type(rhs)}."
+            )
+            self.fail(emsg)
+
+        if set(lhs.keys()) != set(rhs.keys()):
+            emsg = f"{lhs!r} != {rhs!r}."
+            self.fail(emsg)
+
+        for key in lhs.keys():
+            lvalue, rvalue = lhs[key], rhs[key]
+
+            if ma.isMaskedArray(lvalue) or ma.isMaskedArray(rvalue):
+                if not ma.isMaskedArray(lvalue):
+                    emsg = (
+                        f"Dictionary key {key!r} values are not equal, "
+                        f"the LHS value has type {type(lvalue)} and "
+                        f"the RHS value has type {ma.core.MaskedArray}."
+                    )
+                    raise AssertionError(emsg)
+
+                if not ma.isMaskedArray(rvalue):
+                    emsg = (
+                        f"Dictionary key {key!r} values are not equal, "
+                        f"the LHS value has type {ma.core.MaskedArray} and "
+                        f"the RHS value has type {type(rvalue)}."
+                    )
+                    raise AssertionError(emsg)
+
+                self.assertMaskedArrayEqual(lvalue, rvalue)
+            elif isinstance(lvalue, np.ndarray) or isinstance(
+                rvalue, np.ndarray
+            ):
+                if not isinstance(lvalue, np.ndarray):
+                    emsg = (
+                        f"Dictionary key {key!r} values are not equal, "
+                        f"the LHS value has type {type(lvalue)} and "
+                        f"the RHS value has type {np.ndarray}."
+                    )
+                    raise AssertionError(emsg)
+
+                if not isinstance(rvalue, np.ndarray):
+                    emsg = (
+                        f"Dictionary key {key!r} values are not equal, "
+                        f"the LHS value has type {np.ndarray} and "
+                        f"the RHS value has type {type(rvalue)}."
+                    )
+                    raise AssertionError(emsg)
+
+                self.assertArrayEqual(lvalue, rvalue)
+            else:
+                if lvalue != rvalue:
+                    emsg = (
+                        f"Dictionary key {key!r} values are not equal, "
+                        f"{lvalue!r} != {rvalue!r}."
+                    )
+                    raise AssertionError(emsg)
+

 # An environment variable controls whether test timings are output.
 #
diff --git a/lib/iris/tests/integration/fast_load/test_fast_load.py b/lib/iris/tests/integration/fast_load/test_fast_load.py
index 1aa781ebf1..ba50e389a8 100644
--- a/lib/iris/tests/integration/fast_load/test_fast_load.py
+++ b/lib/iris/tests/integration/fast_load/test_fast_load.py
@@ -377,7 +377,8 @@ def callback(cube, collation, filename):
         # Make an 'expected' from selected fields, with the expected attribute.
         expected = CubeList([flds[1], flds[3]]).merge()
         if not self.do_fast_loads:
-            expected[0].attributes["LBVC"] = 8
+            # This is actually a NumPy int32, so honour that here.
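
Stepping back to the assertDictEqual override above: it is needed because plain == between dicts holding NumPy arrays is ambiguous, as the element-wise array comparison cannot collapse to a single bool. A minimal demonstration of the dispatch it performs:

    import numpy as np
    import numpy.ma as ma

    def values_equal(lvalue, rvalue):
        if ma.isMaskedArray(lvalue) and ma.isMaskedArray(rvalue):
            return ma.allequal(lvalue, rvalue)
        if isinstance(lvalue, np.ndarray) and isinstance(rvalue, np.ndarray):
            return np.array_equal(lvalue, rvalue)
        return lvalue == rvalue

    lhs = dict(a=np.arange(3), b=ma.masked_array([1, 2], mask=[0, 1]), c=1)
    rhs = dict(a=np.arange(3), b=ma.masked_array([1, 2], mask=[0, 1]), c=1)
    print(all(values_equal(lhs[k], rhs[k]) for k in lhs))  # -> True
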
+ expected[0].attributes["LBVC"] = np.int32(8) else: expected[0].attributes["A_LBVC"] = [8, 8] diff --git a/lib/iris/tests/results/analysis/abs.cml b/lib/iris/tests/results/analysis/abs.cml index e92f96e1cb..b0a37b6074 100644 --- a/lib/iris/tests/results/analysis/abs.cml +++ b/lib/iris/tests/results/analysis/abs.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/addition.cml b/lib/iris/tests/results/analysis/addition.cml index d673e73bb3..4f9600694d 100644 --- a/lib/iris/tests/results/analysis/addition.cml +++ b/lib/iris/tests/results/analysis/addition.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/addition_coord_x.cml b/lib/iris/tests/results/analysis/addition_coord_x.cml index af0c5ecc91..a086b8ad8b 100644 --- a/lib/iris/tests/results/analysis/addition_coord_x.cml +++ b/lib/iris/tests/results/analysis/addition_coord_x.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/addition_coord_y.cml b/lib/iris/tests/results/analysis/addition_coord_y.cml index ba8547b617..266e81c912 100644 --- a/lib/iris/tests/results/analysis/addition_coord_y.cml +++ b/lib/iris/tests/results/analysis/addition_coord_y.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/addition_different_std_name.cml b/lib/iris/tests/results/analysis/addition_different_std_name.cml index cb77adde99..14b0b42dd8 100644 --- a/lib/iris/tests/results/analysis/addition_different_std_name.cml +++ b/lib/iris/tests/results/analysis/addition_different_std_name.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/addition_in_place.cml b/lib/iris/tests/results/analysis/addition_in_place.cml index d673e73bb3..4f9600694d 100644 --- a/lib/iris/tests/results/analysis/addition_in_place.cml +++ b/lib/iris/tests/results/analysis/addition_in_place.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/addition_in_place_coord.cml b/lib/iris/tests/results/analysis/addition_in_place_coord.cml index 6ec39571c1..00dee609eb 100644 --- a/lib/iris/tests/results/analysis/addition_in_place_coord.cml +++ b/lib/iris/tests/results/analysis/addition_in_place_coord.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/addition_scalar.cml b/lib/iris/tests/results/analysis/addition_scalar.cml index d65d7492fe..daf0050069 100644 --- a/lib/iris/tests/results/analysis/addition_scalar.cml +++ b/lib/iris/tests/results/analysis/addition_scalar.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/apply_ifunc.cml b/lib/iris/tests/results/analysis/apply_ifunc.cml index f2bac40826..fe0e394ee6 100644 --- a/lib/iris/tests/results/analysis/apply_ifunc.cml +++ b/lib/iris/tests/results/analysis/apply_ifunc.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/apply_ifunc_frompyfunc.cml b/lib/iris/tests/results/analysis/apply_ifunc_frompyfunc.cml index 2faa06f4a5..29cb6f611e 100644 --- a/lib/iris/tests/results/analysis/apply_ifunc_frompyfunc.cml +++ b/lib/iris/tests/results/analysis/apply_ifunc_frompyfunc.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/apply_ufunc.cml b/lib/iris/tests/results/analysis/apply_ufunc.cml index f2bac40826..fe0e394ee6 100644 --- a/lib/iris/tests/results/analysis/apply_ufunc.cml +++ b/lib/iris/tests/results/analysis/apply_ufunc.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/apply_ufunc_frompyfunc.cml b/lib/iris/tests/results/analysis/apply_ufunc_frompyfunc.cml index d4239acbad..7b1511f028 100644 --- 
a/lib/iris/tests/results/analysis/apply_ufunc_frompyfunc.cml +++ b/lib/iris/tests/results/analysis/apply_ufunc_frompyfunc.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/division.cml b/lib/iris/tests/results/analysis/division.cml index bbe6c1eb90..762f51ec0a 100644 --- a/lib/iris/tests/results/analysis/division.cml +++ b/lib/iris/tests/results/analysis/division.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/division_by_array.cml b/lib/iris/tests/results/analysis/division_by_array.cml index cb77adde99..14b0b42dd8 100644 --- a/lib/iris/tests/results/analysis/division_by_array.cml +++ b/lib/iris/tests/results/analysis/division_by_array.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/division_by_latitude.cml b/lib/iris/tests/results/analysis/division_by_latitude.cml index 3e2abf69cd..42437d1e36 100644 --- a/lib/iris/tests/results/analysis/division_by_latitude.cml +++ b/lib/iris/tests/results/analysis/division_by_latitude.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/division_by_longitude.cml b/lib/iris/tests/results/analysis/division_by_longitude.cml index b1a0228dc8..264ce9b793 100644 --- a/lib/iris/tests/results/analysis/division_by_longitude.cml +++ b/lib/iris/tests/results/analysis/division_by_longitude.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/division_by_singular_coord.cml b/lib/iris/tests/results/analysis/division_by_singular_coord.cml index 7f7835a1be..4c9c58d760 100644 --- a/lib/iris/tests/results/analysis/division_by_singular_coord.cml +++ b/lib/iris/tests/results/analysis/division_by_singular_coord.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/division_scalar.cml b/lib/iris/tests/results/analysis/division_scalar.cml index cb77adde99..14b0b42dd8 100644 --- a/lib/iris/tests/results/analysis/division_scalar.cml +++ b/lib/iris/tests/results/analysis/division_scalar.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/exponentiate.cml b/lib/iris/tests/results/analysis/exponentiate.cml index a13c6be151..bb825f6714 100644 --- a/lib/iris/tests/results/analysis/exponentiate.cml +++ b/lib/iris/tests/results/analysis/exponentiate.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/log.cml b/lib/iris/tests/results/analysis/log.cml index 33214d01f1..c24e071dc5 100644 --- a/lib/iris/tests/results/analysis/log.cml +++ b/lib/iris/tests/results/analysis/log.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/log10.cml b/lib/iris/tests/results/analysis/log10.cml index fbee8f73f0..abd4065526 100644 --- a/lib/iris/tests/results/analysis/log10.cml +++ b/lib/iris/tests/results/analysis/log10.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/log2.cml b/lib/iris/tests/results/analysis/log2.cml index 6371f3925b..d121ad9a9d 100644 --- a/lib/iris/tests/results/analysis/log2.cml +++ b/lib/iris/tests/results/analysis/log2.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/multiply.cml b/lib/iris/tests/results/analysis/multiply.cml index 44996a9138..8fb8658f5d 100644 --- a/lib/iris/tests/results/analysis/multiply.cml +++ b/lib/iris/tests/results/analysis/multiply.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/multiply_different_std_name.cml b/lib/iris/tests/results/analysis/multiply_different_std_name.cml index 49f1779b77..2d89e5882f 100644 --- a/lib/iris/tests/results/analysis/multiply_different_std_name.cml +++ 
b/lib/iris/tests/results/analysis/multiply_different_std_name.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/sqrt.cml b/lib/iris/tests/results/analysis/sqrt.cml index 3a7bff138c..0dd0fe20b3 100644 --- a/lib/iris/tests/results/analysis/sqrt.cml +++ b/lib/iris/tests/results/analysis/sqrt.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/subtract.cml b/lib/iris/tests/results/analysis/subtract.cml index 7b0740888d..3466578756 100644 --- a/lib/iris/tests/results/analysis/subtract.cml +++ b/lib/iris/tests/results/analysis/subtract.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/subtract_array.cml b/lib/iris/tests/results/analysis/subtract_array.cml index cb77adde99..14b0b42dd8 100644 --- a/lib/iris/tests/results/analysis/subtract_array.cml +++ b/lib/iris/tests/results/analysis/subtract_array.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/subtract_coord_x.cml b/lib/iris/tests/results/analysis/subtract_coord_x.cml index c7aee8395b..060814c6ba 100644 --- a/lib/iris/tests/results/analysis/subtract_coord_x.cml +++ b/lib/iris/tests/results/analysis/subtract_coord_x.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/subtract_coord_y.cml b/lib/iris/tests/results/analysis/subtract_coord_y.cml index 355692b27b..4a9351cf6f 100644 --- a/lib/iris/tests/results/analysis/subtract_coord_y.cml +++ b/lib/iris/tests/results/analysis/subtract_coord_y.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/analysis/subtract_scalar.cml b/lib/iris/tests/results/analysis/subtract_scalar.cml index ab8e9d0d60..f458364143 100644 --- a/lib/iris/tests/results/analysis/subtract_scalar.cml +++ b/lib/iris/tests/results/analysis/subtract_scalar.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_all_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_last_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_middle_dim.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/collapse_zeroth_dim.cml @@ -1,6 +1,9 @@ + + + diff --git 
a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/slice.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/add/TestBroadcasting/transposed.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml index 940661c230..d4a90d37ac 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_all_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml index 940661c230..d4a90d37ac 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_last_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml index 940661c230..d4a90d37ac 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_middle_dim.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml index 940661c230..d4a90d37ac 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/collapse_zeroth_dim.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml index 940661c230..d4a90d37ac 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/slice.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml index 940661c230..d4a90d37ac 100644 --- a/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/divide/TestBroadcasting/transposed.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml 
b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml index b646e8b550..7ae36e51c3 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_all_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml index b646e8b550..7ae36e51c3 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_last_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml index b646e8b550..7ae36e51c3 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_middle_dim.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml index b646e8b550..7ae36e51c3 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/collapse_zeroth_dim.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml index b646e8b550..7ae36e51c3 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/slice.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml index b646e8b550..7ae36e51c3 100644 --- a/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/multiply/TestBroadcasting/transposed.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_all_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_last_dims.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml 
b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_middle_dim.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/collapse_zeroth_dim.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/slice.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml index c6e6271a63..bea6795b38 100644 --- a/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml +++ b/lib/iris/tests/results/unit/analysis/maths/subtract/TestBroadcasting/transposed.cml @@ -1,6 +1,9 @@ + + + diff --git a/lib/iris/tests/test_basic_maths.py b/lib/iris/tests/test_basic_maths.py index 94880d6ed1..a559ee0e8a 100644 --- a/lib/iris/tests/test_basic_maths.py +++ b/lib/iris/tests/test_basic_maths.py @@ -235,7 +235,11 @@ def test_addition_different_attributes(self): b.attributes["my attribute"] = "foobar" c = a + b self.assertIsNone(c.standard_name) - self.assertEqual(c.attributes, {}) + expected = { + "my attribute": "foobar", + "source": "Data from Met Office Unified Model", + } + self.assertEqual(expected, c.attributes) def test_apply_ufunc(self): a = self.cube @@ -344,10 +348,13 @@ def test_ifunc_call_fail(self): my_ifunc = iris.analysis.maths.IFunc(np.square, lambda a: a.units ** 2) - # should fail because giving 2 arguments to an ifunc that expects - # only one - with self.assertRaises(ValueError): - my_ifunc(a, a) + # should now NOT fail because giving 2 arguments to an ifunc that + # expects only one will now ignore the surplus argument and raise + # a logging message instead, and go on to perform the operation. 
+ emsg = "ValueError not raised" + with self.assertRaisesRegex(AssertionError, emsg): + with self.assertRaises(ValueError): + my_ifunc(a, a) my_ifunc = iris.analysis.maths.IFunc( np.multiply, lambda a: cf_units.Unit("1") @@ -509,7 +516,11 @@ def test_multiplication_different_attributes(self): b.attributes["my attribute"] = "foobar" c = a * b self.assertIsNone(c.standard_name) - self.assertEqual(c.attributes, {}) + expected = { + "source": "Data from Met Office Unified Model", + "my attribute": "foobar", + } + self.assertEqual(expected, c.attributes) def test_multiplication_in_place(self): a = self.cube.copy() diff --git a/lib/iris/tests/unit/common/lenient/test__Lenient.py b/lib/iris/tests/unit/common/lenient/test__Lenient.py index cdcf3df9a3..d6bc2882d6 100644 --- a/lib/iris/tests/unit/common/lenient/test__Lenient.py +++ b/lib/iris/tests/unit/common/lenient/test__Lenient.py @@ -12,7 +12,7 @@ # importing anything else. import iris.tests as tests -from collections import Iterable +from collections.abc import Iterable from iris.common.lenient import ( _LENIENT_ENABLE_DEFAULT, diff --git a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py index 600a26fb16..c3bb948185 100644 --- a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py +++ b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py @@ -16,6 +16,9 @@ import unittest.mock as mock from unittest.mock import sentinel +import numpy.ma as ma +import numpy as np + from iris.common.lenient import _LENIENT, _qualname from iris.common.metadata import BaseMetadata, CubeMetadata @@ -391,11 +394,11 @@ def test_names_different_none(self): class Test__combine_lenient_attributes(tests.IrisTest): def setUp(self): self.values = OrderedDict( - one=sentinel.one, - two=sentinel.two, - three=sentinel.three, - four=sentinel.four, - five=sentinel.five, + one="one", + two="two", + three=np.int16(123), + four=np.arange(10), + five=ma.arange(10), ) self.cls = BaseMetadata self.metadata = self.cls(*(None,) * len(self.cls._fields)) @@ -406,11 +409,11 @@ def test_same(self): right = self.values.copy() result = self.metadata._combine_lenient_attributes(left, right) - expected = dict(**left) - self.assertEqual(expected, result) + expected = left + self.assertDictEqual(expected, result) result = self.metadata._combine_lenient_attributes(right, left) - self.assertEqual(expected, result) + self.assertDictEqual(expected, result) def test_different(self): left = self.values.copy() @@ -421,10 +424,10 @@ def test_different(self): expected = self.values.copy() for key in ["two", "four"]: del expected[key] - self.assertEqual(dict(expected), result) + self.assertDictEqual(expected, result) result = self.metadata._combine_lenient_attributes(right, left) - self.assertEqual(dict(expected), result) + self.assertDictEqual(expected, result) def test_different_none(self): left = self.values.copy() @@ -435,25 +438,91 @@ def test_different_none(self): expected = self.values.copy() for key in ["one", "three", "five"]: del expected[key] - self.assertEqual(dict(expected), result) + self.assertDictEqual(expected, result) result = self.metadata._combine_lenient_attributes(right, left) - self.assertEqual(dict(expected), result) + self.assertDictEqual(expected, result) def test_extra(self): left = self.values.copy() right = self.values.copy() - left["extra_left"] = sentinel.extra_left - right["extra_right"] = sentinel.extra_right + left["extra_left"] = "extra_left" + right["extra_right"] = "extra_right" result = 
diff --git a/lib/iris/tests/unit/common/lenient/test__Lenient.py b/lib/iris/tests/unit/common/lenient/test__Lenient.py
index cdcf3df9a3..d6bc2882d6 100644
--- a/lib/iris/tests/unit/common/lenient/test__Lenient.py
+++ b/lib/iris/tests/unit/common/lenient/test__Lenient.py
@@ -12,7 +12,7 @@
 # importing anything else.
 import iris.tests as tests
 
-from collections import Iterable
+from collections.abc import Iterable
 
 from iris.common.lenient import (
     _LENIENT_ENABLE_DEFAULT,
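This one-line import fix is a compatibility requirement rather than a style tweak: the ABC aliases in the top-level `collections` namespace have been deprecated since Python 3.3 and are removed in Python 3.10, so only the `collections.abc` form keeps working:

```python
# Works on every supported Python version:
from collections.abc import Iterable

# `from collections import Iterable` raises ImportError on Python >= 3.10.
print(isinstance([1, 2, 3], Iterable))  # True
```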
diff --git a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py
index 600a26fb16..c3bb948185 100644
--- a/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py
+++ b/lib/iris/tests/unit/common/metadata/test_BaseMetadata.py
@@ -16,6 +16,9 @@
 import unittest.mock as mock
 from unittest.mock import sentinel
 
+import numpy.ma as ma
+import numpy as np
+
 from iris.common.lenient import _LENIENT, _qualname
 from iris.common.metadata import BaseMetadata, CubeMetadata
 
@@ -391,11 +394,11 @@ def test_names_different_none(self):
 class Test__combine_lenient_attributes(tests.IrisTest):
     def setUp(self):
         self.values = OrderedDict(
-            one=sentinel.one,
-            two=sentinel.two,
-            three=sentinel.three,
-            four=sentinel.four,
-            five=sentinel.five,
+            one="one",
+            two="two",
+            three=np.int16(123),
+            four=np.arange(10),
+            five=ma.arange(10),
         )
         self.cls = BaseMetadata
         self.metadata = self.cls(*(None,) * len(self.cls._fields))
@@ -406,11 +409,11 @@ def test_same(self):
         right = self.values.copy()
 
         result = self.metadata._combine_lenient_attributes(left, right)
-        expected = dict(**left)
-        self.assertEqual(expected, result)
+        expected = left
+        self.assertDictEqual(expected, result)
 
         result = self.metadata._combine_lenient_attributes(right, left)
-        self.assertEqual(expected, result)
+        self.assertDictEqual(expected, result)
 
     def test_different(self):
         left = self.values.copy()
@@ -421,10 +424,10 @@ def test_different(self):
         expected = self.values.copy()
         for key in ["two", "four"]:
             del expected[key]
-        self.assertEqual(dict(expected), result)
+        self.assertDictEqual(expected, result)
 
         result = self.metadata._combine_lenient_attributes(right, left)
-        self.assertEqual(dict(expected), result)
+        self.assertDictEqual(expected, result)
 
     def test_different_none(self):
         left = self.values.copy()
@@ -435,25 +438,91 @@ def test_different_none(self):
         expected = self.values.copy()
         for key in ["one", "three", "five"]:
             del expected[key]
-        self.assertEqual(dict(expected), result)
+        self.assertDictEqual(expected, result)
 
         result = self.metadata._combine_lenient_attributes(right, left)
-        self.assertEqual(dict(expected), result)
+        self.assertDictEqual(expected, result)
 
     def test_extra(self):
         left = self.values.copy()
         right = self.values.copy()
-        left["extra_left"] = sentinel.extra_left
-        right["extra_right"] = sentinel.extra_right
+        left["extra_left"] = "extra_left"
+        right["extra_right"] = "extra_right"
 
         result = self.metadata._combine_lenient_attributes(left, right)
         expected = self.values.copy()
         expected["extra_left"] = left["extra_left"]
         expected["extra_right"] = right["extra_right"]
-        self.assertEqual(dict(expected), result)
+        self.assertDictEqual(expected, result)
 
         result = self.metadata._combine_lenient_attributes(right, left)
-        self.assertEqual(dict(expected), result)
+        self.assertDictEqual(expected, result)
+
+
+class Test__combine_strict_attributes(tests.IrisTest):
+    def setUp(self):
+        self.values = OrderedDict(
+            one="one",
+            two="two",
+            three=np.int32(123),
+            four=np.arange(10),
+            five=ma.arange(10),
+        )
+        self.cls = BaseMetadata
+        self.metadata = self.cls(*(None,) * len(self.cls._fields))
+        self.dummy = sentinel.dummy
+
+    def test_same(self):
+        left = self.values.copy()
+        right = self.values.copy()
+
+        result = self.metadata._combine_strict_attributes(left, right)
+        expected = left
+        self.assertDictEqual(expected, result)
+
+        result = self.metadata._combine_strict_attributes(right, left)
+        self.assertDictEqual(expected, result)
+
+    def test_different(self):
+        left = self.values.copy()
+        right = self.values.copy()
+        left["one"] = left["three"] = self.dummy
+
+        result = self.metadata._combine_strict_attributes(left, right)
+        expected = self.values.copy()
+        for key in ["one", "three"]:
+            del expected[key]
+        self.assertDictEqual(expected, result)
+
+        result = self.metadata._combine_strict_attributes(right, left)
+        self.assertDictEqual(expected, result)
+
+    def test_different_none(self):
+        left = self.values.copy()
+        right = self.values.copy()
+        left["one"] = left["three"] = left["five"] = None
+
+        result = self.metadata._combine_strict_attributes(left, right)
+        expected = self.values.copy()
+        for key in ["one", "three", "five"]:
+            del expected[key]
+        self.assertDictEqual(expected, result)
+
+        result = self.metadata._combine_strict_attributes(right, left)
+        self.assertDictEqual(expected, result)
+
+    def test_extra(self):
+        left = self.values.copy()
+        right = self.values.copy()
+        left["extra_left"] = "extra_left"
+        right["extra_right"] = "extra_right"
+
+        result = self.metadata._combine_strict_attributes(left, right)
+        expected = self.values.copy()
+        self.assertDictEqual(expected, result)
+
+        result = self.metadata._combine_strict_attributes(right, left)
+        self.assertDictEqual(expected, result)
 
 
 class Test__compare_lenient(tests.IrisTest):
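Taken together, the lenient and strict combine tests pin down two simple rules. A pure-Python sketch of those rules, not the actual Iris implementation (which compares values by hashing; `np.array_equal` is a stand-in that also copes with the array-valued attributes used above):

```python
import numpy as np


def _equal(left, right):
    # Stand-in for Iris's hash-based value comparison (see _hexdigest).
    return np.array_equal(left, right)


def combine_lenient_attributes(left, right):
    # Lenient: union of both sides, dropping only conflicting values,
    # so "extra" attributes on either side survive.
    combined = {k: v for k, v in left.items() if k not in right}
    combined.update({k: v for k, v in right.items() if k not in left})
    combined.update(
        {
            k: left[k]
            for k in left.keys() & right.keys()
            if _equal(left[k], right[k])
        }
    )
    return combined


def combine_strict_attributes(left, right):
    # Strict: intersection only, keeping an attribute when both sides
    # define it with an equal value.
    return {
        k: left[k]
        for k in left.keys() & right.keys()
        if _equal(left[k], right[k])
    }


# Conflicting "history" is dropped; the one-sided "my attribute" survives
# leniently but not strictly:
left = {"source": "UM", "history": "A"}
right = {"source": "UM", "history": "B", "my attribute": "foobar"}
assert combine_lenient_attributes(left, right) == {
    "source": "UM",
    "my attribute": "foobar",
}
assert combine_strict_attributes(left, right) == {"source": "UM"}
```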
@@ -479,14 +548,15 @@ def test_name_same(self):
             self.assertTrue(lmetadata._compare_lenient(rmetadata))
             self.assertTrue(rmetadata._compare_lenient(lmetadata))
 
-        expected = (len(self.cls._fields) - 1) * 2
+        # mocker is not called for the "units" or "var_name" members.
+        expected = (len(self.cls._fields) - 2) * 2
         self.assertEqual(expected, mocker.call_count)
 
-    def test_name_same_lenient_false(self):
+    def test_name_same_lenient_false__long_name_different(self):
         left = self.none.copy()
         left.update(self.names)
-        right = self.none.copy()
-        right["long_name"] = sentinel.standard_name
+        right = left.copy()
+        right["long_name"] = sentinel.dummy
         lmetadata = self.cls(**left)
         rmetadata = self.cls(**right)
 
@@ -496,7 +566,26 @@
             self.assertFalse(lmetadata._compare_lenient(rmetadata))
             self.assertFalse(rmetadata._compare_lenient(lmetadata))
 
-        expected = (len(self.cls._fields) - 1) * 2
+        # mocker is not called for the "units" or "var_name" members.
+        expected = (len(self.cls._fields) - 2) * 2
+        self.assertEqual(expected, mocker.call_count)
+
+    def test_name_same_lenient_true__var_name_different(self):
+        left = self.none.copy()
+        left.update(self.names)
+        right = left.copy()
+        right["var_name"] = sentinel.dummy
+        lmetadata = self.cls(**left)
+        rmetadata = self.cls(**right)
+
+        with mock.patch.object(
+            self.cls, "_is_attributes", return_value=False
+        ) as mocker:
+            self.assertTrue(lmetadata._compare_lenient(rmetadata))
+            self.assertTrue(rmetadata._compare_lenient(lmetadata))
+
+        # mocker is not called for the "units" or "var_name" members.
+        expected = (len(self.cls._fields) - 2) * 2
         self.assertEqual(expected, mocker.call_count)
 
     def test_name_different(self):
@@ -527,7 +616,8 @@ def test_strict_units(self):
             self.assertTrue(lmetadata._compare_lenient(rmetadata))
             self.assertTrue(rmetadata._compare_lenient(lmetadata))
 
-        expected = (len(self.cls._fields) - 1) * 2
+        # mocker is not called for the "units" or "var_name" members.
+        expected = (len(self.cls._fields) - 2) * 2
         self.assertEqual(expected, mocker.call_count)
 
     def test_strict_units_different(self):
@@ -545,7 +635,8 @@
             self.assertFalse(lmetadata._compare_lenient(rmetadata))
             self.assertFalse(rmetadata._compare_lenient(lmetadata))
 
-        expected = (len(self.cls._fields) - 1) * 2
+        # mocker is not called for the "units" or "var_name" members.
+        expected = (len(self.cls._fields) - 2) * 2
         self.assertEqual(expected, mocker.call_count)
 
     def test_attributes(self):
@@ -612,9 +703,9 @@ def setUp(self):
         self.values = OrderedDict(
             one=sentinel.one,
             two=sentinel.two,
-            three=sentinel.three,
-            four=sentinel.four,
-            five=sentinel.five,
+            three=np.int16(123),
+            four=np.arange(10),
+            five=ma.arange(5),
         )
         self.cls = BaseMetadata
         self.metadata = self.cls(*(None,) * len(self.cls._fields))
@@ -661,6 +752,52 @@ def test_extra(self):
         self.assertTrue(self.metadata._compare_lenient_attributes(right, left))
 
+
+class Test__compare_strict_attributes(tests.IrisTest):
+    def setUp(self):
+        self.values = OrderedDict(
+            one=sentinel.one,
+            two=sentinel.two,
+            three=np.int16(123),
+            four=np.arange(10),
+            five=ma.arange(5),
+        )
+        self.cls = BaseMetadata
+        self.metadata = self.cls(*(None,) * len(self.cls._fields))
+        self.dummy = sentinel.dummy
+
+    def test_same(self):
+        left = self.values.copy()
+        right = self.values.copy()
+
+        self.assertTrue(self.metadata._compare_strict_attributes(left, right))
+        self.assertTrue(self.metadata._compare_strict_attributes(right, left))
+
+    def test_different(self):
+        left = self.values.copy()
+        right = self.values.copy()
+        left["two"] = left["four"] = self.dummy
+
+        self.assertFalse(self.metadata._compare_strict_attributes(left, right))
+        self.assertFalse(self.metadata._compare_strict_attributes(right, left))
+
+    def test_different_none(self):
+        left = self.values.copy()
+        right = self.values.copy()
+        left["one"] = left["three"] = left["five"] = None
+
+        self.assertFalse(self.metadata._compare_strict_attributes(left, right))
+        self.assertFalse(self.metadata._compare_strict_attributes(right, left))
+
+    def test_extra(self):
+        left = self.values.copy()
+        right = self.values.copy()
+        left["extra_left"] = sentinel.extra_left
+        right["extra_right"] = sentinel.extra_right
+
+        self.assertFalse(self.metadata._compare_strict_attributes(left, right))
+        self.assertFalse(self.metadata._compare_strict_attributes(right, left))
+
+
 class Test__difference(tests.IrisTest):
     def setUp(self):
         self.kwargs = dict(
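The comparison tests encode the matching flavour of the same split: lenient comparison also tolerates a differing `var_name` (hence the reduced mock call count), and the two attribute comparisons part ways only over one-sided "extra" attributes. A sketch of the attribute rules implied above (again not the Iris implementation; `np.array_equal` stands in for the hash-based comparison):

```python
import numpy as np


def compare_lenient_attributes(left, right):
    # Lenient: only the attributes common to both sides must agree;
    # extras on either side do not break equality.
    return all(
        np.array_equal(left[k], right[k]) for k in left.keys() & right.keys()
    )


def compare_strict_attributes(left, right):
    # Strict: identical key sets and identical values required.
    return left.keys() == right.keys() and all(
        np.array_equal(left[k], right[k]) for k in left
    )


assert compare_lenient_attributes({"a": 1}, {"a": 1, "b": 2})
assert not compare_strict_attributes({"a": 1}, {"a": 1, "b": 2})
```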
@@ -926,9 +1063,9 @@ def setUp(self):
         self.values = OrderedDict(
             one=sentinel.one,
             two=sentinel.two,
-            three=sentinel.three,
-            four=sentinel.four,
-            five=sentinel.five,
+            three=np.float(3.14),
+            four=np.arange(10, dtype=np.float),
+            five=ma.arange(10, dtype=np.int16),
         )
         self.cls = BaseMetadata
         self.metadata = self.cls(*(None,) * len(self.cls._fields))
@@ -953,12 +1090,15 @@ def test_different(self):
         for key in ["one", "three", "five"]:
             del left[key]
             del right[key]
-        expected = (dict(left), dict(right))
-        self.assertEqual(expected, result)
+        expected_left, expected_right = (left, right)
+        result_left, result_right = result
+        self.assertDictEqual(expected_left, result_left)
+        self.assertDictEqual(expected_right, result_right)
 
         result = self.metadata._difference_lenient_attributes(right, left)
-        expected = (dict(right), dict(left))
-        self.assertEqual(expected, result)
+        result_left, result_right = result
+        self.assertDictEqual(expected_right, result_left)
+        self.assertDictEqual(expected_left, result_right)
 
     def test_different_none(self):
         left = self.values.copy()
@@ -969,12 +1109,15 @@ def test_different_none(self):
         for key in ["two", "four"]:
             del left[key]
             del right[key]
-        expected = (dict(left), dict(right))
-        self.assertEqual(expected, result)
+        expected_left, expected_right = (left, right)
+        result_left, result_right = result
+        self.assertDictEqual(expected_left, result_left)
+        self.assertDictEqual(expected_right, result_right)
 
         result = self.metadata._difference_lenient_attributes(right, left)
-        expected = (dict(right), dict(left))
-        self.assertEqual(expected, result)
+        result_left, result_right = result
+        self.assertDictEqual(expected_right, result_left)
+        self.assertDictEqual(expected_left, result_right)
 
     def test_extra(self):
         left = self.values.copy()
@@ -982,9 +1125,6 @@ def test_extra(self):
         left["extra_left"] = sentinel.extra_left
         right["extra_right"] = sentinel.extra_right
         result = self.metadata._difference_lenient_attributes(left, right)
-        expected = self.values.copy()
-        expected["extra_left"] = left["extra_left"]
-        expected["extra_right"] = right["extra_right"]
         self.assertIsNone(result)
 
         result = self.metadata._difference_lenient_attributes(right, left)
@@ -996,9 +1136,9 @@ def setUp(self):
         self.values = OrderedDict(
             one=sentinel.one,
             two=sentinel.two,
-            three=sentinel.three,
-            four=sentinel.four,
-            five=sentinel.five,
+            three=np.int32(123),
+            four=np.arange(10),
+            five=ma.arange(10),
         )
         self.cls = BaseMetadata
         self.metadata = self.cls(*(None,) * len(self.cls._fields))
@@ -1024,17 +1164,14 @@ def test_different(self):
         for key in ["two", "four"]:
             del expected_left[key]
             del expected_right[key]
-        expected = (expected_left, expected_right)
-        self.assertEqual(expected, result)
+        result_left, result_right = result
+        self.assertDictEqual(expected_left, result_left)
+        self.assertDictEqual(expected_right, result_right)
 
         result = self.metadata._difference_strict_attributes(right, left)
-        expected_left = left.copy()
-        expected_right = right.copy()
-        for key in ["two", "four"]:
-            del expected_left[key]
-            del expected_right[key]
-        expected = (expected_right, expected_left)
-        self.assertEqual(expected, result)
+        result_left, result_right = result
+        self.assertDictEqual(expected_right, result_left)
+        self.assertDictEqual(expected_left, result_right)
 
     def test_different_none(self):
         left = self.values.copy()
@@ -1047,17 +1184,14 @@ def test_different_none(self):
         for key in ["two", "four"]:
             del expected_left[key]
             del expected_right[key]
-        expected = (expected_left, expected_right)
-        self.assertEqual(expected, result)
+        result_left, result_right = result
+        self.assertDictEqual(expected_left, result_left)
+        self.assertDictEqual(expected_right, result_right)
 
         result = self.metadata._difference_strict_attributes(right, left)
-        expected_left = left.copy()
-        expected_right = right.copy()
-        for key in ["two", "four"]:
-            del expected_left[key]
-            del expected_right[key]
-        expected = (expected_right, expected_left)
-        self.assertEqual(expected, result)
+        result_left, result_right = result
+        self.assertDictEqual(expected_right, result_left)
+        self.assertDictEqual(expected_left, result_right)
 
     def test_extra(self):
         left = self.values.copy()
@@ -1068,14 +1202,14 @@ def test_extra(self):
         left["extra_left"] = sentinel.extra_left
         right["extra_right"] = sentinel.extra_right
 
         result = self.metadata._difference_strict_attributes(left, right)
         expected_left = dict(extra_left=left["extra_left"])
         expected_right = dict(extra_right=right["extra_right"])
-        expected = (expected_left, expected_right)
-        self.assertEqual(expected, result)
+        result_left, result_right = result
+        self.assertDictEqual(expected_left, result_left)
+        self.assertDictEqual(expected_right, result_right)
 
         result = self.metadata._difference_strict_attributes(right, left)
-        expected_left = dict(extra_left=left["extra_left"])
-        expected_right = dict(extra_right=right["extra_right"])
-        expected = (expected_right, expected_left)
-        self.assertEqual(expected, result)
+        result_left, result_right = result
+        self.assertDictEqual(expected_right, result_left)
+        self.assertDictEqual(expected_left, result_right)
 
 
 class Test__is_attributes(tests.IrisTest):
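The difference tests complete the picture: both flavours return a `(left, right)` pair of the disagreeing values, or `None` when nothing differs, and they diverge only over one-sided attributes. A hedged sketch of the implied behaviour, as before:

```python
import numpy as np


def difference_lenient_attributes(left, right):
    # Lenient: only attributes present on BOTH sides can differ; extras
    # are ignored, and no differences at all yields None.
    keys = [
        k
        for k in left.keys() & right.keys()
        if not np.array_equal(left[k], right[k])
    ]
    if not keys:
        return None
    return {k: left[k] for k in keys}, {k: right[k] for k in keys}


def difference_strict_attributes(left, right):
    # Strict: an attribute missing from one side also counts, so the
    # "extra" keys show up in the result.
    ldiff = {
        k: v
        for k, v in left.items()
        if k not in right or not np.array_equal(v, right[k])
    }
    rdiff = {
        k: v
        for k, v in right.items()
        if k not in left or not np.array_equal(v, left[k])
    }
    return (ldiff, rdiff) if (ldiff or rdiff) else None


assert difference_lenient_attributes({"a": 1}, {"a": 1, "b": 2}) is None
assert difference_strict_attributes({"a": 1}, {"a": 1, "b": 2}) == (
    {},
    {"b": 2},
)
```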
diff --git a/lib/iris/tests/unit/common/metadata/test__hexdigest.py b/lib/iris/tests/unit/common/metadata/test__hexdigest.py
new file mode 100644
index 0000000000..798f71bcd0
--- /dev/null
+++ b/lib/iris/tests/unit/common/metadata/test__hexdigest.py
@@ -0,0 +1,179 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the LGPL license.
+# See COPYING and COPYING.LESSER in the root of the repository for full
+# licensing details.
+"""
+Unit tests for the :func:`iris.common.metadata._hexdigest`.
+
+"""
+
+# Import iris.tests first so that some things can be initialised before
+# importing anything else.
+import iris.tests as tests
+
+from unittest import mock
+
+import numpy.ma as ma
+import numpy as np
+from xxhash import xxh64, xxh64_hexdigest
+
+from iris.common.metadata import _hexdigest as hexdigest
+
+
+class TestBytesLikeObject(tests.IrisTest):
+    def setUp(self):
+        self.hasher = xxh64()
+        self.hasher.reset()
+
+    @staticmethod
+    def _ndarray(value):
+        parts = str((value.shape, xxh64_hexdigest(value)))
+        return xxh64_hexdigest(parts)
+
+    @staticmethod
+    def _masked(value):
+        parts = str(
+            (
+                value.shape,
+                xxh64_hexdigest(value.data),
+                xxh64_hexdigest(value.mask),
+            )
+        )
+        return xxh64_hexdigest(parts)
+
+    def test_string(self):
+        value = "hello world"
+        self.hasher.update(value)
+        expected = self.hasher.hexdigest()
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_numpy_array_int(self):
+        value = np.arange(10, dtype=np.int)
+        expected = self._ndarray(value)
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_numpy_array_float(self):
+        value = np.arange(10, dtype=np.float)
+        expected = self._ndarray(value)
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_numpy_array_float_not_int(self):
+        ivalue = np.arange(10, dtype=np.int)
+        fvalue = np.arange(10, dtype=np.float)
+        expected = self._ndarray(ivalue)
+        self.assertNotEqual(expected, hexdigest(fvalue))
+
+    def test_numpy_array_reshape(self):
+        value = np.arange(10).reshape(2, 5)
+        expected = self._ndarray(value)
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_numpy_array_reshape_not_flat(self):
+        value = np.arange(10).reshape(2, 5)
+        expected = self._ndarray(value)
+        self.assertNotEqual(expected, hexdigest(value.flatten()))
+
+    def test_masked_array_int(self):
+        value = ma.arange(10, dtype=np.int)
+        expected = self._masked(value)
+        self.assertEqual(expected, hexdigest(value))
+
+        value[0] = ma.masked
+        self.assertNotEqual(expected, hexdigest(value))
+        expected = self._masked(value)
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_masked_array_float(self):
+        value = ma.arange(10, dtype=np.float)
+        expected = self._masked(value)
+        self.assertEqual(expected, hexdigest(value))
+
+        value[0] = ma.masked
+        self.assertNotEqual(expected, hexdigest(value))
+        expected = self._masked(value)
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_masked_array_float_not_int(self):
+        ivalue = ma.arange(10, dtype=np.int)
+        fvalue = ma.arange(10, dtype=np.float)
+        expected = self._masked(ivalue)
+        self.assertNotEqual(expected, hexdigest(fvalue))
+
+    def test_masked_array_not_array(self):
+        value = ma.arange(10)
+        expected = self._masked(value)
+        self.assertNotEqual(expected, hexdigest(value.data))
+
+    def test_masked_array_reshape(self):
+        value = ma.arange(10).reshape(2, 5)
+        expected = self._masked(value)
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_masked_array_reshape_not_flat(self):
+        value = ma.arange(10).reshape(2, 5)
+        expected = self._masked(value)
+        self.assertNotEqual(expected, hexdigest(value.flatten()))
+
+
+class TestNotBytesLikeObject(tests.IrisTest):
+    def _expected(self, value):
+        parts = str((type(value), value))
+        return xxh64_hexdigest(parts)
+
+    def test_int(self):
+        value = 123
+        expected = self._expected(value)
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_numpy_int(self):
+        value = np.int(123)
+        expected = self._expected(value)
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_float(self):
+        value = 123.4
+        expected = self._expected(value)
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_numpy_float(self):
+        value = np.float(123.4)
+        expected = self._expected(value)
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_list(self):
+        value = [1, 2, 3]
+        expected = self._expected(value)
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_tuple(self):
+        value = (1, 2, 3)
+        expected = self._expected(value)
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_dict(self):
+        value = dict(one=1, two=2, three=3)
+        expected = self._expected(value)
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_sentinel(self):
+        value = mock.sentinel.value
+        expected = self._expected(value)
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_instance(self):
+        class Dummy:
+            pass
+
+        value = Dummy()
+        expected = self._expected(value)
+        self.assertEqual(expected, hexdigest(value))
+
+    def test_int_not_str(self):
+        value = 123
+        expected = self._expected(value)
+        self.assertNotEqual(expected, hexdigest(str(value)))
+
+
+if __name__ == "__main__":
+    tests.main()
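The helper methods in this new test module spell out the hashing scheme: arrays hash their shape together with their flat buffer, masked arrays additionally hash the mask, and anything not bytes-like falls back to the repr of `(type, value)`. A self-contained sketch of that scheme (not a copy of `iris.common.metadata._hexdigest`; in particular, `ma.getmaskarray` is an assumption here so that unmasked arrays also hash cleanly):

```python
import numpy.ma as ma
import numpy as np
from xxhash import xxh64_hexdigest


def hexdigest_sketch(value):
    if isinstance(value, ma.MaskedArray):
        # Hash the mask alongside the data: masking a point changes the
        # digest even though the underlying data buffer is unchanged.
        parts = str(
            (
                value.shape,
                xxh64_hexdigest(value.data),
                xxh64_hexdigest(ma.getmaskarray(value)),
            )
        )
    elif isinstance(value, np.ndarray):
        # Hash the shape alongside the flat buffer, so reshaping an
        # otherwise identical array changes the digest.
        parts = str((value.shape, xxh64_hexdigest(value)))
    elif isinstance(value, (str, bytes)):
        # Bytes-like values feed straight into the hasher.
        return xxh64_hexdigest(value)
    else:
        # Fall back to the repr of (type, value), so 123 and "123" differ.
        parts = str((type(value), value))
    return xxh64_hexdigest(parts)


print(hexdigest_sketch(np.arange(10)))
print(hexdigest_sketch(np.arange(10).reshape(2, 5)))  # a different digest
```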
diff --git a/requirements/core.txt b/requirements/core.txt
index dbc0333d7c..56544d1926 100644
--- a/requirements/core.txt
+++ b/requirements/core.txt
@@ -12,3 +12,4 @@ matplotlib<3.3
 netcdf4
 numpy>=1.14
 scipy
+xxhash #conda: python-xxhash
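The new core dependency is `xxhash` (packaged as `python-xxhash` on conda, hence the marker comment): a fast, non-cryptographic 64-bit hash. Its one-shot and streaming interfaces agree, which is all the metadata hashing relies on:

```python
from xxhash import xxh64, xxh64_hexdigest

# One-shot digest.
assert xxh64_hexdigest("hello world") == xxh64("hello world").hexdigest()

# Streaming the same bytes in pieces gives the same digest.
hasher = xxh64()
hasher.update("hello ")
hasher.update("world")
assert hasher.hexdigest() == xxh64_hexdigest("hello world")
```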