From 8a7c3a6d3b501906b2befb586cac4edc1e111d00 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Tue, 6 Oct 2020 23:29:40 -0600 Subject: [PATCH 01/36] MNT: Elminate some test warnings Don't rely on implicit conversion of quantities within tuples. --- tests/calc/test_calc_tools.py | 34 ++++++++++++++++++++++------------ 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/tests/calc/test_calc_tools.py b/tests/calc/test_calc_tools.py index 4eeda3f40fe..37212aa7398 100644 --- a/tests/calc/test_calc_tools.py +++ b/tests/calc/test_calc_tools.py @@ -59,7 +59,9 @@ def test_find_intersections(direction, expected): y1 = 3 * x**2 y2 = 100 * x - 650 # Note: Truth is what we will get with this sampling, not the mathematical intersection - assert_array_almost_equal(expected, find_intersections(x, y1, y2, direction=direction), 2) + x_int, y_int = find_intersections(x, y1, y2, direction=direction) + assert_array_almost_equal(x_int, expected[0], 2) + assert_array_almost_equal(y_int, expected[1], 2) def test_find_intersections_no_intersections(): @@ -67,10 +69,10 @@ def test_find_intersections_no_intersections(): x = np.linspace(5, 30, 17) y1 = 3 * x + 0 y2 = 5 * x + 5 - # Note: Truth is what we will get with this sampling, not the mathematical intersection - truth = np.array([[], - []]) - assert_array_equal(truth, find_intersections(x, y1, y2)) + + x_int, y_int = find_intersections(x, y1, y2) + assert_array_equal(x_int, np.array([])) + assert_array_equal(y_int, np.array([])) def test_find_intersections_invalid_direction(): @@ -107,7 +109,9 @@ def test_find_intersections_intersections_in_data_at_ends(direction, expected): x = np.arange(14) y1 = np.array([0, 3, 2, 1, -1, 2, 2, 0, 1, 0, 0, -2, 2, 0]) y2 = np.zeros_like(y1) - assert_array_almost_equal(expected, find_intersections(x, y1, y2, direction=direction), 2) + x_int, y_int = find_intersections(x, y1, y2, direction=direction) + assert_array_almost_equal(x_int, expected[0], 2) + assert_array_almost_equal(y_int, expected[1], 
2) @pytest.mark.parametrize('mask, expected_idx, expected_element', [ @@ -791,14 +795,16 @@ def test_gradient_2d(deriv_2d_data): [-3, -1, 4], [-3, -1, 4], [-3, -1, 4]])) - assert_array_almost_equal(res, truth, 5) + for r, t in zip(res, truth): + assert_array_almost_equal(r, t, 5) def test_gradient_4d(deriv_4d_data): """Test gradient with 4D arrays.""" res = gradient(deriv_4d_data, deltas=(1, 1, 1, 1)) truth = tuple(factor * np.ones_like(deriv_4d_data) for factor in (48., 16., 4., 1.)) - assert_array_almost_equal(res, truth, 8) + for r, t in zip(res, truth): + assert_array_almost_equal(r, t, 8) def test_gradient_restricted_axes(deriv_2d_data): @@ -813,7 +819,8 @@ def test_gradient_restricted_axes(deriv_2d_data): [[-3], [-1], [4]], [[-3], [-1], [4]], [[-3], [-1], [4]]])) - assert_array_almost_equal(res, truth, 5) + for r, t in zip(res, truth): + assert_array_almost_equal(r, t, 5) def test_bounding_indices(): @@ -991,7 +998,8 @@ def test_3d_gradient_3d_data_no_axes(deriv_4d_data): test = deriv_4d_data[0] res = gradient(test, deltas=(1, 1, 1)) truth = tuple(factor * np.ones_like(test) for factor in (16., 4., 1.)) - assert_array_almost_equal(res, truth, 8) + for r, t in zip(res, truth): + assert_array_almost_equal(r, t, 8) def test_2d_gradient_3d_data_no_axes(deriv_4d_data): @@ -1014,14 +1022,16 @@ def test_2d_gradient_4d_data_2_axes_3_deltas(deriv_4d_data): """Test 2D gradient of 4D data with 2 axes and 3 deltas.""" res = gradient(deriv_4d_data, deltas=(1, 1, 1), axes=(-2, -1)) truth = tuple(factor * np.ones_like(deriv_4d_data) for factor in (4., 1.)) - assert_array_almost_equal(res, truth, 8) + for r, t in zip(res, truth): + assert_array_almost_equal(r, t, 8) def test_2d_gradient_4d_data_2_axes_2_deltas(deriv_4d_data): """Test 2D gradient of 4D data with 2 axes and 2 deltas.""" res = gradient(deriv_4d_data, deltas=(1, 1), axes=(0, 1)) truth = tuple(factor * np.ones_like(deriv_4d_data) for factor in (48., 16.)) - assert_array_almost_equal(res, truth, 8) + for r, t in 
zip(res, truth): + assert_array_almost_equal(r, t, 8) def test_2d_gradient_4d_data_2_axes_1_deltas(deriv_4d_data): From c42a5e264eaba9c4f628965aba0bac30e4436736 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Tue, 6 Oct 2020 23:31:21 -0600 Subject: [PATCH 02/36] MNT: Fix some xarray test warnings Mainly by using the proper test assert function. Also, don't repeat calls in one place, but use the results that were already called with silenced warnings. --- tests/test_xarray.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/test_xarray.py b/tests/test_xarray.py index ef767f56972..212c07a6c13 100644 --- a/tests/test_xarray.py +++ b/tests/test_xarray.py @@ -193,7 +193,7 @@ def test_quantify(test_ds_generic): assert is_quantity(result.data) assert result.data.units == units.kelvin assert 'units' not in result.attrs - np.testing.assert_array_almost_equal(result.data, units.Quantity(original)) + assert_array_almost_equal(result.data, units.Quantity(original, 'K')) def test_dequantify(): @@ -213,9 +213,9 @@ def test_dataset_quantify(test_ds_generic): assert is_quantity(result['test'].data) assert result['test'].data.units == units.kelvin assert 'units' not in result['test'].attrs - np.testing.assert_array_almost_equal( + assert_array_almost_equal( result['test'].data, - units.Quantity(test_ds_generic['test'].data) + units.Quantity(test_ds_generic['test'].data, 'K') ) assert result.attrs == test_ds_generic.attrs @@ -1354,7 +1354,7 @@ def test_preprocess_and_wrap_with_to_magnitude(): def func(a, b): return a * b - np.testing.assert_array_equal(func(data, data2), np.array([0, 0, 1])) + assert_array_equal(func(data, data2), np.array([0, 0, 1])) def test_preprocess_and_wrap_with_variable(): @@ -1377,9 +1377,9 @@ def func(a, b): result_21 = func(data2, data1) assert isinstance(result_12, xr.DataArray) - xr.testing.assert_identical(func(data1, data2), expected_12) + xr.testing.assert_identical(result_12, expected_12) assert 
is_quantity(result_21) - assert_array_equal(func(data2, data1), expected_21) + assert_array_equal(result_21, expected_21) def test_grid_deltas_from_dataarray_lonlat(test_da_lonlat): From c866db9e1884b59b7c180b720473d1687000a7fb Mon Sep 17 00:00:00 2001 From: Ryan May Date: Tue, 6 Oct 2020 23:34:25 -0600 Subject: [PATCH 03/36] MNT: Silence warnings from putting units in masked arrays --- src/metpy/calc/basic.py | 4 ++-- src/metpy/calc/thermo.py | 2 +- src/metpy/units.py | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/metpy/calc/basic.py b/src/metpy/calc/basic.py index ae77c82510e..2f0fbf9e68e 100644 --- a/src/metpy/calc/basic.py +++ b/src/metpy/calc/basic.py @@ -405,8 +405,8 @@ def apparent_temperature(temperature, relative_humidity, speed, face_level_winds # NB: older numpy.ma.where does not return a masked array app_temperature = masked_array( np.ma.where(masked_array(wind_chill_temperature).mask, - heat_index_temperature.to(temperature.units), - wind_chill_temperature.to(temperature.units) + heat_index_temperature.m_as(temperature.units), + wind_chill_temperature.m_as(temperature.units) ), temperature.units) # If mask_undefined is False, then set any masked values to the temperature diff --git a/src/metpy/calc/thermo.py b/src/metpy/calc/thermo.py index 0f36e33efd9..4f17a00fd84 100644 --- a/src/metpy/calc/thermo.py +++ b/src/metpy/calc/thermo.py @@ -1638,7 +1638,7 @@ def saturation_equivalent_potential_temperature(pressure, temperature): e = saturation_vapor_pressure(temperature).to('hPa').magnitude r = saturation_mixing_ratio(pressure, temperature).magnitude - th_l = t * (1000 / (p - e)) ** mpconsts.kappa + th_l = t * (1000 / (p - e)) ** mpconsts.nounit.kappa th_es = th_l * np.exp((3036. 
/ t - 1.78) * r * (1 + 0.448 * r)) return units.Quantity(th_es, units.kelvin) diff --git a/src/metpy/units.py b/src/metpy/units.py index 726939b18f7..9ae7c2c7071 100644 --- a/src/metpy/units.py +++ b/src/metpy/units.py @@ -209,6 +209,7 @@ def masked_array(data, data_units=None, **kwargs): """ if data_units is None: data_units = data.units + data = data.magnitude return units.Quantity(np.ma.masked_array(data, **kwargs), data_units) From 442800b4352d904199c2a7a38631e9c7842d1445 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Tue, 6 Oct 2020 23:36:01 -0600 Subject: [PATCH 04/36] MNT: Silence unit warnings from calling matplotlib functions --- src/metpy/plots/skewt.py | 7 ++++++- src/metpy/plots/station_plot.py | 4 +--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/metpy/plots/skewt.py b/src/metpy/plots/skewt.py index c5a2a2cf222..a349067b9d3 100644 --- a/src/metpy/plots/skewt.py +++ b/src/metpy/plots/skewt.py @@ -421,6 +421,10 @@ def plot_barbs(self, pressure, u, v, c=None, xloc=1.0, x_clip_radius=0.1, raise ValueError('To convert to plotting units, units must be attached to ' 'u and v wind components.') + # Drop units for u,v since they're not used and trigger warnings + u = getattr(u, 'magnitude', u) + v = getattr(v, 'magnitude', v) + # Assemble array of x-locations in axes space x = np.empty_like(pressure) x.fill(xloc) @@ -966,7 +970,8 @@ def plot_colormapped(self, u, v, c, intervals=None, colors=None, **kwargs): else: line_args = self._form_line_args(kwargs) - # Do the plotting + # Do the plotting -- drop units on c since it's not used + c = getattr(c, 'magnitude', c) lc = colored_line(u, v, c, **line_args) self.ax.add_collection(lc) return lc diff --git a/src/metpy/plots/station_plot.py b/src/metpy/plots/station_plot.py index 6bacd55ffd1..db2689a0bda 100644 --- a/src/metpy/plots/station_plot.py +++ b/src/metpy/plots/station_plot.py @@ -339,9 +339,7 @@ def _vector_plotting_units(u, v, plotting_units): 'u and v wind components.') # Strip units, 
CartoPy transform doesn't like - u = np.array(u) - v = np.array(v) - return u, v + return np.array(getattr(u, 'magnitude', u)), np.array(getattr(v, 'magnitude', v)) @staticmethod def _scalar_plotting_units(scalar_value, plotting_units): From 046355b16a6e789bcf2f41618232caadcf3a2ede Mon Sep 17 00:00:00 2001 From: Ryan May Date: Mon, 1 Nov 2021 13:27:59 -0600 Subject: [PATCH 05/36] MNT: Silence another Pandas warning when adding units attr --- tests/plots/test_declarative.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/plots/test_declarative.py b/tests/plots/test_declarative.py index e075215d0d6..3e894026384 100644 --- a/tests/plots/test_declarative.py +++ b/tests/plots/test_declarative.py @@ -1288,7 +1288,11 @@ def test_plotobs_units_with_formatter(ccrs): """Test using PlotObs with a field that both has units and a custom formatter.""" df = pd.read_csv(get_test_data('SFC_obs.csv', as_file_obj=False), infer_datetime_format=True, parse_dates=['valid']) - df.units = {'alti': 'inHg'} + + # Catch warning from Pandas due to setting units + with warnings.catch_warnings(): + warnings.simplefilter('ignore', UserWarning) + df.units = {'alti': 'inHg'} # Plot desired data obs = PlotObs() From efbe0b4cd0f9c6864fb898b582ba8b22f231f44d Mon Sep 17 00:00:00 2001 From: Ryan May Date: Mon, 1 Nov 2021 13:49:10 -0600 Subject: [PATCH 06/36] MNT: Fix spelling of "abbreviated" and "abbreviation" --- src/metpy/calc/tools.py | 14 +++++++------- tests/calc/test_calc_tools.py | 16 ++++++++-------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/src/metpy/calc/tools.py b/src/metpy/calc/tools.py index 895ad216d7f..60d327be916 100644 --- a/src/metpy/calc/tools.py +++ b/src/metpy/calc/tools.py @@ -1773,11 +1773,11 @@ def parse_angle(input_dir): """ if isinstance(input_dir, str): - # abb_dirs = abbrieviated directions - abb_dirs = _clean_direction([_abbrieviate_direction(input_dir)]) + # abb_dirs = abbreviated directions + abb_dirs = 
_clean_direction([_abbreviate_direction(input_dir)]) elif hasattr(input_dir, '__len__'): # handle np.array, pd.Series, list, and array-like input_dir_str = ','.join(_clean_direction(input_dir, preprocess=True)) - abb_dir_str = _abbrieviate_direction(input_dir_str) + abb_dir_str = _abbreviate_direction(input_dir_str) abb_dirs = _clean_direction(abb_dir_str.split(',')) else: # handle unrecognizable scalar return np.nan @@ -1795,7 +1795,7 @@ def _clean_direction(dir_list, preprocess=False): for the_dir in dir_list] -def _abbrieviate_direction(ext_dir_str): +def _abbreviate_direction(ext_dir_str): """Convert extended (non-abbreviated) directions to abbreviation.""" return (ext_dir_str .upper() @@ -1889,12 +1889,12 @@ def angle_to_direction(input_angle, full=False, level=3): return dir_str_arr dir_str_arr = ','.join(dir_str_arr) - dir_str_arr = _unabbrieviate_direction(dir_str_arr) + dir_str_arr = _unabbreviate_direction(dir_str_arr) return dir_str_arr.replace(',', ' ') if scalar else dir_str_arr.split(',') -def _unabbrieviate_direction(abb_dir_str): - """Convert abbrieviated directions to non-abbrieviated direction.""" +def _unabbreviate_direction(abb_dir_str): + """Convert abbreviated directions to non-abbreviated direction.""" return (abb_dir_str .upper() .replace(UND, 'Undefined ') diff --git a/tests/calc/test_calc_tools.py b/tests/calc/test_calc_tools.py index 37212aa7398..01ed64cd657 100644 --- a/tests/calc/test_calc_tools.py +++ b/tests/calc/test_calc_tools.py @@ -697,15 +697,15 @@ def test_laplacian_2d(deriv_2d_data): assert_array_almost_equal(laplac, laplac_true, 5) -def test_parse_angle_abbrieviated(): - """Test abbrieviated directional text in degrees.""" +def test_parse_angle_abbreviated(): + """Test abbreviated directional text in degrees.""" expected_angles_degrees = FULL_CIRCLE_DEGREES output_angles_degrees = parse_angle(DIR_STRS[:-1]) assert_array_almost_equal(output_angles_degrees, expected_angles_degrees) def test_parse_angle_ext(): - """Test extended 
(unabbrieviated) directional text in degrees.""" + """Test extended (unabbreviated) directional text in degrees.""" test_dir_strs = ['NORTH', 'NORTHnorthEast', 'North_East', 'East__North_East', 'easT', 'east south east', 'south east', ' south southeast', 'SOUTH', 'SOUTH SOUTH WEST', 'southWEST', 'WEST south_WEST', @@ -716,7 +716,7 @@ def test_parse_angle_ext(): def test_parse_angle_mix_multiple(): - """Test list of extended (unabbrieviated) directional text in degrees in one go.""" + """Test list of extended (unabbreviated) directional text in degrees in one go.""" test_dir_strs = ['NORTH', 'nne', 'ne', 'east north east', 'easT', 'east se', 'south east', ' south southeast', 'SOUTH', 'SOUTH SOUTH WEST', 'sw', 'WEST south_WEST', @@ -727,7 +727,7 @@ def test_parse_angle_mix_multiple(): def test_parse_angle_none(): - """Test list of extended (unabbrieviated) directional text in degrees in one go.""" + """Test list of extended (unabbreviated) directional text in degrees in one go.""" test_dir_strs = None expected_angles_degrees = np.nan output_angles_degrees = parse_angle(test_dir_strs) @@ -735,7 +735,7 @@ def test_parse_angle_none(): def test_parse_angle_invalid_number(): - """Test list of extended (unabbrieviated) directional text in degrees in one go.""" + """Test list of extended (unabbreviated) directional text in degrees in one go.""" test_dir_strs = 365. 
expected_angles_degrees = np.nan output_angles_degrees = parse_angle(test_dir_strs) @@ -743,7 +743,7 @@ def test_parse_angle_invalid_number(): def test_parse_angle_invalid_arr(): - """Test list of extended (unabbrieviated) directional text in degrees in one go.""" + """Test list of extended (unabbreviated) directional text in degrees in one go.""" test_dir_strs = ['nan', None, np.nan, 35, 35.5, 'north', 'andrewiscool'] expected_angles_degrees = [np.nan, np.nan, np.nan, np.nan, np.nan, 0, np.nan] output_angles_degrees = parse_angle(test_dir_strs) @@ -751,7 +751,7 @@ def test_parse_angle_invalid_arr(): def test_parse_angle_mix_multiple_arr(): - """Test list of extended (unabbrieviated) directional text in degrees in one go.""" + """Test list of extended (unabbreviated) directional text in degrees in one go.""" test_dir_strs = np.array(['NORTH', 'nne', 'ne', 'east north east', 'easT', 'east se', 'south east', ' south southeast', 'SOUTH', 'SOUTH SOUTH WEST', 'sw', 'WEST south_WEST', From 0f98bfe355a70fb516d956b1c05e23e9de7d3469 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Tue, 2 Nov 2021 12:51:50 -0600 Subject: [PATCH 07/36] MNT: Avoid warnings from angle abbreviation Cleans up some unit handling to be more standard. --- src/metpy/calc/tools.py | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/src/metpy/calc/tools.py b/src/metpy/calc/tools.py index 60d327be916..16e2c8c2da2 100644 --- a/src/metpy/calc/tools.py +++ b/src/metpy/calc/tools.py @@ -25,19 +25,19 @@ UND = 'UND' UND_ANGLE = -999. -DIR_STRS = ( +DIR_STRS = [ 'N', 'NNE', 'NE', 'ENE', 'E', 'ESE', 'SE', 'SSE', 'S', 'SSW', 'SW', 'WSW', 'W', 'WNW', 'NW', 'NNW', UND -) # note the order matters! +] # note the order matters! 
MAX_DEGREE_ANGLE = units.Quantity(360, 'degree') BASE_DEGREE_MULTIPLIER = units.Quantity(22.5, 'degree') DIR_DICT = {dir_str: i * BASE_DEGREE_MULTIPLIER for i, dir_str in enumerate(DIR_STRS)} -DIR_DICT[UND] = np.nan +DIR_DICT[UND] = units.Quantity(np.nan, 'degree') @exporter.export @@ -1773,16 +1773,15 @@ def parse_angle(input_dir): """ if isinstance(input_dir, str): - # abb_dirs = abbreviated directions - abb_dirs = _clean_direction([_abbreviate_direction(input_dir)]) + abb_dir = _clean_direction([_abbreviate_direction(input_dir)])[0] + return DIR_DICT[abb_dir] elif hasattr(input_dir, '__len__'): # handle np.array, pd.Series, list, and array-like input_dir_str = ','.join(_clean_direction(input_dir, preprocess=True)) abb_dir_str = _abbreviate_direction(input_dir_str) abb_dirs = _clean_direction(abb_dir_str.split(',')) + return units.Quantity.from_list(itemgetter(*abb_dirs)(DIR_DICT)) else: # handle unrecognizable scalar - return np.nan - - return itemgetter(*abb_dirs)(DIR_DICT) + return units.Quantity(np.nan, 'degree') def _clean_direction(dir_list, preprocess=False): @@ -1846,11 +1845,10 @@ def angle_to_direction(input_angle, full=False, level=3): # clean any numeric strings, negatives, and None does not handle strings with alphabet input_angle = units.Quantity(np.array(input_angle).astype(float), origin_units) - input_angle[input_angle < 0] = units.Quantity(np.nan, origin_units) + input_angle[input_angle < 0] = np.nan - # normalizer used for angles > 360 degree to normalize between 0 - 360 - normalizer = np.array(input_angle.m / MAX_DEGREE_ANGLE.m, dtype=int) - norm_angles = abs(input_angle - MAX_DEGREE_ANGLE * normalizer) + # Normalize between 0 - 360 + norm_angles = input_angle % MAX_DEGREE_ANGLE if level == 3: nskip = 1 From 3edaae06a567ae0f8b9f5e3d981c822d8e2df056 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Fri, 1 Sep 2023 14:39:01 -0600 Subject: [PATCH 08/36] MNT: Add default pytest warning filter This silences some false alarm warnings coming from 
cython-built code. --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index ba816b8c960..883ee7e5406 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -103,6 +103,7 @@ norecursedirs = "build docs .idea" doctest_optionflags = "NORMALIZE_WHITESPACE" mpl-results-path = "test_output" xfail_strict = true +filterwarnings = ["ignore:numpy.ndarray size changed:RuntimeWarning"] [tool.ruff] line-length = 95 From e00e873a904cd3233aadc54cdcf12bcce5509b66 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Fri, 1 Sep 2023 15:04:11 -0600 Subject: [PATCH 09/36] TST: Adjust creation of xarray datetime64 coordinates This adjusts the creation to specify nanosecond-level precision explicitly to eliminate some xarray warnings. --- conftest.py | 5 ++--- tests/calc/test_calc_tools.py | 2 +- tests/calc/test_cross_sections.py | 2 +- tests/interpolate/test_slices.py | 2 +- 4 files changed, 5 insertions(+), 6 deletions(-) diff --git a/conftest.py b/conftest.py index 834167081da..6c18a74c6ce 100644 --- a/conftest.py +++ b/conftest.py @@ -111,9 +111,8 @@ def test_da_xy(): 'lambert_conformal': ([], '')}, coords={ 'time': xarray.DataArray( - numpy.array([numpy.datetime64('2018-07-01T00:00'), - numpy.datetime64('2018-07-01T06:00'), - numpy.datetime64('2018-07-01T12:00')]), + numpy.array(['2018-07-01T00:00', '2018-07-01T06:00', '2018-07-01T12:00'], + dtype='datetime64[ns]'), name='time', dims=['time'] ), diff --git a/tests/calc/test_calc_tools.py b/tests/calc/test_calc_tools.py index 01ed64cd657..12ab6fdb065 100644 --- a/tests/calc/test_calc_tools.py +++ b/tests/calc/test_calc_tools.py @@ -1116,7 +1116,7 @@ def test_first_derivative_xarray_time_subsecond_precision(): coords={'time': np.array(['2019-01-01T00:00:00.0', '2019-01-01T00:00:00.1', '2019-01-01T00:00:00.2'], - dtype='datetime64[ms]')}, + dtype='datetime64[ns]')}, attrs={'units': 'kelvin'}) deriv = first_derivative(test_da) diff --git a/tests/calc/test_cross_sections.py 
b/tests/calc/test_cross_sections.py index b4f70b18367..03dc4f865c8 100644 --- a/tests/calc/test_cross_sections.py +++ b/tests/calc/test_cross_sections.py @@ -67,7 +67,7 @@ def test_cross_xy(): }, coords={ 'time': xr.DataArray( - np.array([np.datetime64('2018-07-01T00:00')]), + np.array(['2018-07-01T00:00'], dtype='datetime64[ns]'), name='time', dims=['time'] ), diff --git a/tests/interpolate/test_slices.py b/tests/interpolate/test_slices.py index d992a928ff0..41eb6dc01fb 100644 --- a/tests/interpolate/test_slices.py +++ b/tests/interpolate/test_slices.py @@ -57,7 +57,7 @@ def test_ds_xy(): }, coords={ 'time': xr.DataArray( - np.array([np.datetime64('2018-07-01T00:00')]), + np.array(['2018-07-01T00:00'], dtype='datetime64[ns]'), name='time', dims=['time'] ), From 1d15314ba22d360e304436873c3705b1b1007615 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Wed, 8 Nov 2023 14:47:59 -0700 Subject: [PATCH 10/36] MNT: Bump minimum supported scipy to 1.8 This gives us access to QhullError directly in scipy.spatial. 
--- pyproject.toml | 2 +- tests/calc/test_thermo.py | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 883ee7e5406..91c134acd83 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ dependencies = [ "pint>=0.15", "pooch>=1.2.0", "pyproj>=3.0.0", - "scipy>=1.6.0", + "scipy>=1.8.0", "traitlets>=5.0.5", "xarray>=0.18.0" ] diff --git a/tests/calc/test_thermo.py b/tests/calc/test_thermo.py index 5e7aea34c9c..3c94e5caded 100644 --- a/tests/calc/test_thermo.py +++ b/tests/calc/test_thermo.py @@ -203,8 +203,6 @@ def test_moist_lapse_starting_points(start, direction): @pytest.mark.xfail(platform.machine() == 'arm64', reason='ValueError is not raised on Mac M2') @pytest.mark.xfail((sys.platform == 'win32') and version_check('scipy<1.11.3'), reason='solve_ivp() does not error on Windows + SciPy < 1.11.3') -@pytest.mark.xfail(version_check('scipy<1.7'), - reason='solve_ivp() does not error on Scipy < 1.7') def test_moist_lapse_failure(): """Test moist_lapse under conditions that cause the ODE solver to fail.""" p = np.logspace(3, -1, 10) * units.hPa From 0927a15a5b5e91203148e663c8d5e50df794dd43 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Fri, 1 Sep 2023 15:07:41 -0600 Subject: [PATCH 11/36] MNT: Adjust import of QhullError from Scipy This eliminates a deprecation warning. --- src/metpy/interpolate/points.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/metpy/interpolate/points.py b/src/metpy/interpolate/points.py index 36add3138a4..e33f7e19cb2 100644 --- a/src/metpy/interpolate/points.py +++ b/src/metpy/interpolate/points.py @@ -8,7 +8,7 @@ import numpy as np from scipy.interpolate import griddata, Rbf -from scipy.spatial import cKDTree, ConvexHull, Delaunay, qhull +from scipy.spatial import cKDTree, ConvexHull, Delaunay, QhullError from . 
import geometry, tools from ..package_tools import Exporter @@ -153,7 +153,7 @@ def natural_neighbor_point(xp, yp, variable, grid_loc, tri, neighbors, circumcen area_list.append(cur_area * value[0]) - except (ZeroDivisionError, qhull.QhullError) as e: + except (ZeroDivisionError, QhullError) as e: message = ('Error during processing of a grid. ' 'Interpolation will continue but be mindful ' f'of errors in output. {e}') From 28f75e96c3218ee8ef646c95d62f63377e6241ef Mon Sep 17 00:00:00 2001 From: Ryan May Date: Fri, 1 Sep 2023 17:40:11 -0600 Subject: [PATCH 12/36] MNT: Fixing up pandas deprecation warnings --- src/metpy/calc/basic.py | 3 ++ tests/io/test_gempak.py | 20 +++------ tests/plots/test_declarative.py | 75 +++++++++++++-------------------- 3 files changed, 38 insertions(+), 60 deletions(-) diff --git a/src/metpy/calc/basic.py b/src/metpy/calc/basic.py index 2f0fbf9e68e..4cd0e332a4c 100644 --- a/src/metpy/calc/basic.py +++ b/src/metpy/calc/basic.py @@ -829,6 +829,9 @@ def smooth_gaussian(scalar_grid, n): num_ax = len(scalar_grid.shape) # Assume the last two axes represent the horizontal directions sgma_seq = [sgma if i > num_ax - 3 else 0 for i in range(num_ax)] + # Drop units as necessary to avoid warnings from scipy doing so--units will be reattached + # if necessary by wrapper + scalar_grid = getattr(scalar_grid, 'magnitude', scalar_grid) filter_args = {'sigma': sgma_seq, 'truncate': 2 * np.sqrt(2)} if hasattr(scalar_grid, 'mask'): diff --git a/tests/io/test_gempak.py b/tests/io/test_gempak.py index 95517761608..25b7105a6de 100644 --- a/tests/io/test_gempak.py +++ b/tests/io/test_gempak.py @@ -159,18 +159,14 @@ def test_unmerged_sigw_pressure_sounding(): def test_standard_surface(): """Test to read a standard surface file.""" - def dtparse(string): - return datetime.strptime(string, '%y%m%d/%H%M') - skip = ['text', 'spcl'] gsf = GempakSurface(get_test_data('gem_std.sfc')) gstns = gsf.sfjson() - gempak = pd.read_csv(get_test_data('gem_std.csv'), - 
index_col=['STN', 'YYMMDD/HHMM'], - parse_dates=['YYMMDD/HHMM'], - date_parser=dtparse) + gempak = pd.read_csv(get_test_data('gem_std.csv')) + gempak['YYMMDD/HHMM'] = pd.to_datetime(gempak['YYMMDD/HHMM'], format='%y%m%d/%H%M') + gempak = gempak.set_index(['STN', 'YYMMDD/HHMM']) for stn in gstns: idx_key = (stn['properties']['station_id'], @@ -184,17 +180,13 @@ def dtparse(string): def test_ship_surface(): """Test to read a ship surface file.""" - def dtparse(string): - return datetime.strptime(string, '%y%m%d/%H%M') - skip = ['text', 'spcl'] gsf = GempakSurface(get_test_data('gem_ship.sfc')) - gempak = pd.read_csv(get_test_data('gem_ship.csv'), - index_col=['STN', 'YYMMDD/HHMM'], - parse_dates=['YYMMDD/HHMM'], - date_parser=dtparse) + gempak = pd.read_csv(get_test_data('gem_ship.csv')) + gempak['YYMMDD/HHMM'] = pd.to_datetime(gempak['YYMMDD/HHMM'], format='%y%m%d/%H%M') + gempak = gempak.set_index(['STN', 'YYMMDD/HHMM']) gempak.sort_index(inplace=True) uidx = gempak.index.unique() diff --git a/tests/plots/test_declarative.py b/tests/plots/test_declarative.py index 3e894026384..e1ac516b66a 100644 --- a/tests/plots/test_declarative.py +++ b/tests/plots/test_declarative.py @@ -1194,6 +1194,14 @@ def sample_obs(): columns=['time', 'stid', 'pressure', 'temperature', 'dewpoint']) +@pytest.fixture() +def pandas_sfc(): + """Open sample pandas data.""" + df = pd.read_csv(get_test_data('SFC_obs.csv', as_file_obj=False)) + df['valid'] = pd.to_datetime(df['valid'], format='%Y-%m-%d %H:%M:%S') + return df + + def test_plotobs_subset_default_nolevel(sample_obs): """Test PlotObs subsetting with minimal config.""" obs = PlotObs() @@ -1284,19 +1292,16 @@ def test_plotobs_subset_time_window_level(sample_obs): @pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.016) -def test_plotobs_units_with_formatter(ccrs): +def test_plotobs_units_with_formatter(ccrs, pandas_sfc): """Test using PlotObs with a field that both has units and a custom formatter.""" - df = 
pd.read_csv(get_test_data('SFC_obs.csv', as_file_obj=False), - infer_datetime_format=True, parse_dates=['valid']) - # Catch warning from Pandas due to setting units with warnings.catch_warnings(): warnings.simplefilter('ignore', UserWarning) - df.units = {'alti': 'inHg'} + pandas_sfc.units = {'alti': 'inHg'} # Plot desired data obs = PlotObs() - obs.data = df + obs.data = pandas_sfc obs.time = datetime(1993, 3, 12, 12) obs.time_window = timedelta(minutes=15) obs.level = None @@ -1324,13 +1329,10 @@ def test_plotobs_units_with_formatter(ccrs): @pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.025) -def test_declarative_sfc_obs(ccrs): +def test_declarative_sfc_obs(ccrs, pandas_sfc): """Test making a surface observation plot.""" - data = pd.read_csv(get_test_data('SFC_obs.csv', as_file_obj=False), - infer_datetime_format=True, parse_dates=['valid']) - obs = PlotObs() - obs.data = data + obs.data = pandas_sfc obs.time = datetime(1993, 3, 12, 12) obs.time_window = timedelta(minutes=15) obs.level = None @@ -1356,13 +1358,10 @@ def test_declarative_sfc_obs(ccrs): @pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.025) -def test_declarative_sfc_obs_args(ccrs): +def test_declarative_sfc_obs_args(ccrs, pandas_sfc): """Test making a surface observation plot with mpl arguments.""" - data = pd.read_csv(get_test_data('SFC_obs.csv', as_file_obj=False), - infer_datetime_format=True, parse_dates=['valid']) - obs = PlotObs() - obs.data = data + obs.data = pandas_sfc obs.time = datetime(1993, 3, 12, 12) obs.time_window = timedelta(minutes=15) obs.level = None @@ -1390,13 +1389,10 @@ def test_declarative_sfc_obs_args(ccrs): @pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.016) @needs_cartopy -def test_declarative_sfc_text(): +def test_declarative_sfc_text(pandas_sfc): """Test making a surface observation plot with text.""" - data = pd.read_csv(get_test_data('SFC_obs.csv', as_file_obj=False), - infer_datetime_format=True, parse_dates=['valid']) - obs = 
PlotObs() - obs.data = data + obs.data = pandas_sfc obs.time = datetime(1993, 3, 12, 12) obs.time_window = timedelta(minutes=15) obs.level = None @@ -1423,13 +1419,10 @@ def test_declarative_sfc_text(): @pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.025) -def test_declarative_sfc_obs_changes(ccrs): +def test_declarative_sfc_obs_changes(ccrs, pandas_sfc): """Test making a surface observation plot, changing the field.""" - data = pd.read_csv(get_test_data('SFC_obs.csv', as_file_obj=False), - infer_datetime_format=True, parse_dates=['valid']) - obs = PlotObs() - obs.data = data + obs.data = pandas_sfc obs.time = datetime(1993, 3, 12, 12) obs.level = None obs.fields = ['tmpf'] @@ -1459,13 +1452,10 @@ def test_declarative_sfc_obs_changes(ccrs): @pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.171) -def test_declarative_colored_barbs(ccrs): +def test_declarative_colored_barbs(ccrs, pandas_sfc): """Test making a surface plot with a colored barb (gh-1274).""" - data = pd.read_csv(get_test_data('SFC_obs.csv', as_file_obj=False), - infer_datetime_format=True, parse_dates=['valid']) - obs = PlotObs() - obs.data = data + obs.data = pandas_sfc obs.time = datetime(1993, 3, 12, 13) obs.level = None obs.vector_field = ('uwind', 'vwind') @@ -1491,13 +1481,10 @@ def test_declarative_colored_barbs(ccrs): @pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.305) -def test_declarative_sfc_obs_full(ccrs): +def test_declarative_sfc_obs_full(ccrs, pandas_sfc): """Test making a full surface observation plot.""" - data = pd.read_csv(get_test_data('SFC_obs.csv', as_file_obj=False), - infer_datetime_format=True, parse_dates=['valid']) - obs = PlotObs() - obs.data = data + obs.data = pandas_sfc obs.time = datetime(1993, 3, 12, 13) obs.time_window = timedelta(minutes=15) obs.level = None @@ -1608,14 +1595,12 @@ def test_declarative_upa_obs_convert_barb_units(): return pc.figure -def test_attribute_error_time(ccrs): +def test_attribute_error_time(ccrs, 
pandas_sfc): """Make sure we get a useful error when the time variable is not found.""" - data = pd.read_csv(get_test_data('SFC_obs.csv', as_file_obj=False), - infer_datetime_format=True, parse_dates=['valid']) - data.rename(columns={'valid': 'vtime'}, inplace=True) + pandas_sfc.rename(columns={'valid': 'vtime'}, inplace=True) obs = PlotObs() - obs.data = data + obs.data = pandas_sfc obs.time = datetime(1993, 3, 12, 12) obs.level = None obs.fields = ['tmpf'] @@ -1639,14 +1624,12 @@ def test_attribute_error_time(ccrs): pc.draw() -def test_attribute_error_station(ccrs): +def test_attribute_error_station(ccrs, pandas_sfc): """Make sure we get a useful error when the station variable is not found.""" - data = pd.read_csv(get_test_data('SFC_obs.csv', as_file_obj=False), - infer_datetime_format=True, parse_dates=['valid']) - data.rename(columns={'station': 'location'}, inplace=True) + pandas_sfc.rename(columns={'station': 'location'}, inplace=True) obs = PlotObs() - obs.data = data + obs.data = pandas_sfc obs.time = datetime(1993, 3, 12, 12) obs.level = None obs.fields = ['tmpf'] From 27845daa49ddc11d810b93273dbb84c19c2c1cb8 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Thu, 2 Nov 2023 11:58:09 -0600 Subject: [PATCH 13/36] MNT: Quiet warning about stripped units in zoom_xarray --- src/metpy/calc/basic.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/metpy/calc/basic.py b/src/metpy/calc/basic.py index 4cd0e332a4c..bd4d096d789 100644 --- a/src/metpy/calc/basic.py +++ b/src/metpy/calc/basic.py @@ -1107,6 +1107,8 @@ def zoom_xarray(input_field, zoom, output=None, order=3, mode='constant', cval=0 available. 
""" + # Dequantify input to avoid warnings and make sure units propagate + input_field = input_field.metpy.dequantify() # Zoom data zoomed_data = scipy_zoom( input_field.data, zoom, output=output, order=order, mode=mode, cval=cval, From ddb0ae0938b058e120f5e0b01fed2d5ccd7a906d Mon Sep 17 00:00:00 2001 From: Ryan May Date: Thu, 2 Nov 2023 13:29:16 -0600 Subject: [PATCH 14/36] MNT: Explicitly handle some test warnings for making lat/lon --- tests/calc/test_calc_tools.py | 6 +++++- tests/calc/test_kinematics.py | 2 +- tests/plots/test_declarative.py | 2 +- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/calc/test_calc_tools.py b/tests/calc/test_calc_tools.py index 12ab6fdb065..1ed737c0c8a 100644 --- a/tests/calc/test_calc_tools.py +++ b/tests/calc/test_calc_tools.py @@ -1327,7 +1327,11 @@ def check_params(scalar, dx=None, dy=None, parallel_scale=None, meridional_scale if subset: temp = temp.isel(time=0).metpy.sel(vertical=500 * units.hPa) - t, dx, dy, p, m, lat, x_dim, y_dim = check_params(temp) + if datafile != 'GFS_test.nc' and (not assign_lat_lon or no_crs): + with pytest.warns(UserWarning, match='Latitude and longitude computed on-demand'): + t, dx, dy, p, m, lat, x_dim, y_dim = check_params(temp) + else: + t, dx, dy, p, m, lat, x_dim, y_dim = check_params(temp) if transpose: if subset: diff --git a/tests/calc/test_kinematics.py b/tests/calc/test_kinematics.py index a5f120f9579..4a5cb5330aa 100644 --- a/tests/calc/test_kinematics.py +++ b/tests/calc/test_kinematics.py @@ -144,7 +144,7 @@ def test_vorticity_grid_pole(): u = xr.DataArray(us, name='u', coords=(y, x), dims=('y', 'x'), attrs={'units': 'm/s'}) v = xr.DataArray(vs, name='v', coords=(y, x), dims=('y', 'x'), attrs={'units': 'm/s'}) - ds = xr.merge((u, v)).metpy.assign_crs(grid) + ds = xr.merge((u, v)).metpy.assign_crs(grid).metpy.assign_latitude_longitude() vort = vorticity(ds.u, ds.v) diff --git a/tests/plots/test_declarative.py b/tests/plots/test_declarative.py index 
e1ac516b66a..cfdaa598696 100644 --- a/tests/plots/test_declarative.py +++ b/tests/plots/test_declarative.py @@ -184,7 +184,7 @@ def test_declarative_smooth_contour(): def test_declarative_smooth_contour_calculation(): """Test making a contour plot using smooth_contour.""" data = xr.open_dataset(get_test_data('narr_example.nc', as_file_obj=False)) - data = data.metpy.parse_cf() + data = data.metpy.parse_cf().metpy.assign_latitude_longitude() data['wind_speed'] = wind_speed(data['u_wind'], data['v_wind']) From 1811328372841417a8346c1f4fd4983e62de774c Mon Sep 17 00:00:00 2001 From: Ryan May Date: Thu, 2 Nov 2023 13:36:49 -0600 Subject: [PATCH 15/36] TST: Clean up Pandas indexing warnings in METAR tests --- tests/io/test_metar.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/io/test_metar.py b/tests/io/test_metar.py index 6ed6d170460..d3dad5629b8 100644 --- a/tests/io/test_metar.py +++ b/tests/io/test_metar.py @@ -212,10 +212,10 @@ def test_date_time_given(): """Test for when date_time is given.""" df = parse_metar_to_dataframe('K6B0 261200Z AUTO 00000KT 10SM CLR 20/M17 A3002 RMK AO2 ' 'T01990165=', year=2019, month=6) - assert df.date_time[0] == datetime(2019, 6, 26, 12) - assert df.eastward_wind[0] == 0 - assert df.northward_wind[0] == 0 - assert_almost_equal(df.air_pressure_at_sea_level[0], 1016.56) + assert df.iloc[0].date_time == datetime(2019, 6, 26, 12) + assert df.iloc[0].eastward_wind == 0 + assert df.iloc[0].northward_wind == 0 + assert_almost_equal(df.iloc[0].air_pressure_at_sea_level, 1016.56) assert_almost_equal(df.visibility.values, 16093.44) @@ -372,7 +372,7 @@ def test_parse_no_pint_objects_in_df(): for df in (parse_metar_file(input_file), parse_metar_to_dataframe(metar_str)): for column in df: - assert not is_quantity(df[column][0]) + assert not is_quantity(df.iloc[0][column]) def test_repr(): From 6c35d2d9768dbd3fb9db315ea650b8c61a94627e Mon Sep 17 00:00:00 2001 From: Ryan May Date: Thu, 2 Nov 2023 13:54:46 -0600 
Subject: [PATCH 16/36] TST: Filter some warnings about dimensions These aren't necessarily correct, but fully fixing/eliminating them out of the advection call is difficult. --- tests/calc/test_kinematics.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/calc/test_kinematics.py b/tests/calc/test_kinematics.py index 4a5cb5330aa..f71a75bc530 100644 --- a/tests/calc/test_kinematics.py +++ b/tests/calc/test_kinematics.py @@ -376,6 +376,7 @@ def test_advection_4d_vertical(data_4d): assert a.data.units == units.Unit('K/sec') +@pytest.mark.filterwarnings('ignore:Horizontal dimension numbers not found.') def test_advection_1d_vertical(): """Test 1-d vertical advection with parsed dims.""" pressure = xr.DataArray( @@ -403,6 +404,7 @@ def test_advection_2d_asym(): assert_array_equal(a, truth) +@pytest.mark.filterwarnings('ignore:Vertical dimension number not found.') def test_advection_xarray(basic_dataset): """Test advection calculation using xarray support.""" a = advection(basic_dataset.temperature, basic_dataset.u, basic_dataset.v) From a046f099a4e14249b234aa43d37029a669484df3 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Thu, 2 Nov 2023 13:59:12 -0600 Subject: [PATCH 17/36] TST: Catch an expected warning about missing dimensions --- tests/calc/test_calc_tools.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/calc/test_calc_tools.py b/tests/calc/test_calc_tools.py index 1ed737c0c8a..f59d6c558b3 100644 --- a/tests/calc/test_calc_tools.py +++ b/tests/calc/test_calc_tools.py @@ -1417,8 +1417,9 @@ def check_params(scalar, dx=None, dy=None, x_dim=-1, y_dim=-2): }, attrs={'units': 'K'}).to_dataset().metpy.parse_cf('temperature') - with pytest.raises(AttributeError, - match='horizontal dimension coordinates cannot be found.'): + with (pytest.raises(AttributeError, + match='horizontal dimension coordinates cannot be found.'), + pytest.warns(UserWarning, match='Horizontal dimension numbers not found.')): check_params(test_da) From 
3764f06f4704a977e56d2adad30caab865e10d6d Mon Sep 17 00:00:00 2001 From: Ryan May Date: Thu, 2 Nov 2023 14:29:04 -0600 Subject: [PATCH 18/36] BUG: Fix wrapper returning a list of Quantity instances We want a Quantity instead. --- src/metpy/xarray.py | 3 ++- tests/calc/test_calc_tools.py | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/metpy/xarray.py b/src/metpy/xarray.py index e45f03af027..4030a031ade 100644 --- a/src/metpy/xarray.py +++ b/src/metpy/xarray.py @@ -1316,6 +1316,7 @@ def cast_variables(arg, arg_name): for i, arg in enumerate(wrap_like): if isinstance(arg, str): match[i] = bound_args.arguments[arg] + match = tuple(match) # Cast all DataArrays to Pint Quantities _mutate_arguments(bound_args, xr.DataArray, lambda arg, _: arg.metpy.unit_array) @@ -1336,7 +1337,7 @@ def cast_variables(arg, arg_name): else: wrapping = _wrap_output_like_not_matching_units - if isinstance(match, list): + if isinstance(match, tuple): return tuple(wrapping(*args) for args in zip(result, match)) else: return wrapping(result, match) diff --git a/tests/calc/test_calc_tools.py b/tests/calc/test_calc_tools.py index f59d6c558b3..6f5b8a3c28a 100644 --- a/tests/calc/test_calc_tools.py +++ b/tests/calc/test_calc_tools.py @@ -747,6 +747,7 @@ def test_parse_angle_invalid_arr(): test_dir_strs = ['nan', None, np.nan, 35, 35.5, 'north', 'andrewiscool'] expected_angles_degrees = [np.nan, np.nan, np.nan, np.nan, np.nan, 0, np.nan] output_angles_degrees = parse_angle(test_dir_strs) + assert isinstance(output_angles_degrees, units.Quantity) assert_array_almost_equal(output_angles_degrees, expected_angles_degrees) From ae9c4fd25f12d936da69cbd96672fcc63d40a039 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Fri, 3 Nov 2023 15:25:00 -0600 Subject: [PATCH 19/36] TST: Ensure testing figures are closed pytest-mpl handles it for figures we compare, but we need to manually close other figures we generate. 
--- conftest.py | 11 ------- src/metpy/testing.py | 14 +++++++++ tests/plots/test_declarative.py | 21 ++++++++++---- tests/plots/test_mpl.py | 16 +++++------ tests/plots/test_skewt.py | 38 +++++++++++++------------ tests/plots/test_station_plot.py | 49 ++++++++++++++++---------------- tests/plots/test_util.py | 17 ++++++----- 7 files changed, 90 insertions(+), 76 deletions(-) diff --git a/conftest.py b/conftest.py index 6c18a74c6ce..6825ca51621 100644 --- a/conftest.py +++ b/conftest.py @@ -152,17 +152,6 @@ def test_da_xy(): return ds.metpy.parse_cf('temperature') -@pytest.fixture() -def set_agg_backend(): - """Fixture to ensure the Agg backend is active.""" - prev_backend = matplotlib.pyplot.get_backend() - try: - matplotlib.pyplot.switch_backend('agg') - yield - finally: - matplotlib.pyplot.switch_backend(prev_backend) - - @pytest.fixture(params=['dask', 'xarray', 'masked', 'numpy']) def array_type(request): """Return an array type for testing calc functions.""" diff --git a/src/metpy/testing.py b/src/metpy/testing.py index 529b6609293..7f3de9f4856 100644 --- a/src/metpy/testing.py +++ b/src/metpy/testing.py @@ -13,6 +13,7 @@ import operator as op import re +import matplotlib.pyplot as plt import numpy as np import numpy.testing from packaging.version import Version @@ -126,6 +127,19 @@ def wrapped(*args, **kwargs): needs_cartopy = needs_module('cartopy') +@contextlib.contextmanager +def autoclose_figure(*args, **kwargs): + """Create a figure that is automatically closed when exiting a block. + + ``*args`` and ``**kwargs`` are forwarded onto the call to `plt.figure()`. + """ + fig = plt.figure(*args, **kwargs) + try: + yield fig + finally: + plt.close(fig) + + def get_upper_air_data(date, station): """Get upper air observations from the test data cache. 
diff --git a/tests/plots/test_declarative.py b/tests/plots/test_declarative.py index cfdaa598696..b06ef8a8da1 100644 --- a/tests/plots/test_declarative.py +++ b/tests/plots/test_declarative.py @@ -5,6 +5,7 @@ from datetime import datetime, timedelta from io import BytesIO +from unittest.mock import patch, PropertyMock import warnings import matplotlib.pyplot as plt @@ -569,6 +570,8 @@ def test_ndim_error_scalar(cfeature): with pytest.raises(ValueError): pc.draw() + plt.close(pc.figure) + def test_ndim_error_vector(cfeature): """Make sure we get a useful error when the field is not set.""" @@ -590,6 +593,8 @@ def test_ndim_error_vector(cfeature): with pytest.raises(ValueError): pc.draw() + plt.close(pc.figure) + def test_no_field_error_barbs(): """Make sure we get a useful error when the field is not set.""" @@ -1623,6 +1628,8 @@ def test_attribute_error_time(ccrs, pandas_sfc): with pytest.raises(AttributeError): pc.draw() + plt.close(pc.figure) + def test_attribute_error_station(ccrs, pandas_sfc): """Make sure we get a useful error when the station variable is not found.""" @@ -1652,6 +1659,8 @@ def test_attribute_error_station(ccrs, pandas_sfc): with pytest.raises(AttributeError): pc.draw() + plt.close(pc.figure) + @pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.024) def test_declarative_sfc_obs_change_units(ccrs): @@ -1970,6 +1979,7 @@ def test_save(): pc = PanelContainer() fobj = BytesIO() pc.save(fobj, format='png') + plt.close(pc.figure) fobj.seek(0) @@ -1977,14 +1987,14 @@ def test_save(): assert fobj.read() -def test_show(set_agg_backend): +def test_show(): """Test that show works properly.""" pc = PanelContainer() - - # Matplotlib warns when using show with Agg - with warnings.catch_warnings(): - warnings.simplefilter('ignore', UserWarning) + with patch.object(plt, 'show', new_callable=PropertyMock) as show: pc.show() + show.assert_called() + + plt.close(pc.figure) @needs_cartopy @@ -2026,6 +2036,7 @@ def test_copy(): copied_obj = obj.copy() 
assert obj is not copied_obj assert obj.size == copied_obj.size + plt.close(obj.figure) # Copies of plots in MapPanels should not point to same location in memory obj = MapPanel() diff --git a/tests/plots/test_mpl.py b/tests/plots/test_mpl.py index 3dfd603be8b..4da999f8cc9 100644 --- a/tests/plots/test_mpl.py +++ b/tests/plots/test_mpl.py @@ -6,11 +6,11 @@ from tempfile import TemporaryFile import matplotlib.patheffects as mpatheffects -import matplotlib.pyplot as plt import numpy as np # Needed to trigger scattertext monkey-patching import metpy.plots # noqa: F401, I202 +from metpy.testing import autoclose_figure # Avoiding an image-based test here since that would involve text, which can be tricky @@ -19,11 +19,11 @@ def test_scattertext_patheffect_empty(): """Test scattertext with empty strings and PathEffects (Issue #245).""" strings = ['abc', '', 'def'] x, y = np.arange(6).reshape(2, 3) - fig = plt.figure() - ax = fig.add_subplot(1, 1, 1) - ax.scattertext(x, y, strings, color='white', - path_effects=[mpatheffects.withStroke(linewidth=1, foreground='black')]) + with autoclose_figure() as fig: + ax = fig.add_subplot(1, 1, 1) + ax.scattertext(x, y, strings, color='white', + path_effects=[mpatheffects.withStroke(linewidth=1, foreground='black')]) - # Need to trigger a render - with TemporaryFile('wb') as fobj: - fig.savefig(fobj) + # Need to trigger a render + with TemporaryFile('wb') as fobj: + fig.savefig(fobj) diff --git a/tests/plots/test_skewt.py b/tests/plots/test_skewt.py index 433fa095491..2a66389a3f7 100644 --- a/tests/plots/test_skewt.py +++ b/tests/plots/test_skewt.py @@ -11,7 +11,7 @@ import pytest from metpy.plots import Hodograph, SkewT -from metpy.testing import version_check +from metpy.testing import autoclose_figure, version_check from metpy.units import units @@ -126,7 +126,8 @@ def test_skewt_with_grid_enabled(): """Test using SkewT when gridlines are already enabled (#271).""" with plt.rc_context(rc={'axes.grid': True}): # Also tests when we 
don't pass in Figure - SkewT(aspect='auto') + s = SkewT(aspect='auto') + plt.close(s.ax.figure) @pytest.mark.mpl_image_compare(tolerance=0., remove_text=True, style='default') @@ -139,8 +140,8 @@ def test_skewt_arbitrary_rect(): def test_skewt_subplot_rect_conflict(): """Test the subplot/rect conflict failure.""" - with pytest.raises(ValueError): - SkewT(rect=(0.15, 0.35, 0.8, 0.3), subplot=(1, 1, 1)) + with pytest.raises(ValueError), autoclose_figure(figsize=(7, 7)) as fig: + SkewT(fig, rect=(0.15, 0.35, 0.8, 0.3), subplot=(1, 1, 1)) @pytest.mark.mpl_image_compare(tolerance=0.0198, remove_text=True, style='default') @@ -262,12 +263,12 @@ def test_skewt_shade_area(test_profile): def test_skewt_shade_area_invalid(test_profile): """Test shading areas on a SkewT plot.""" p, t, _, tp = test_profile - fig = plt.figure(figsize=(9, 9)) - skew = SkewT(fig, aspect='auto') - skew.plot(p, t, 'r') - skew.plot(p, tp, 'k') - with pytest.raises(ValueError): - skew.shade_area(p, t, tp, which='positve') + with autoclose_figure(figsize=(9, 9)) as fig: + skew = SkewT(fig, aspect='auto') + skew.plot(p, t, 'r') + skew.plot(p, tp, 'k') + with pytest.raises(ValueError): + skew.shade_area(p, t, tp, which='positve') @pytest.mark.mpl_image_compare(tolerance=0.033, remove_text=True, style='default') @@ -356,7 +357,8 @@ def test_hodograph_masked_array(): def test_hodograph_alone(): """Test to create Hodograph without specifying axes.""" - Hodograph() + h = Hodograph() + plt.close(h.ax.figure) @pytest.mark.mpl_image_compare(tolerance=0, remove_text=True) @@ -430,10 +432,10 @@ def test_skewt_barb_unit_conversion_exception(u, v): """Test that an error is raised if unit conversion is requested on plain arrays.""" p_wind = np.array([500]) * units.hPa - fig = plt.figure(figsize=(9, 9)) - skew = SkewT(fig, aspect='auto') - with pytest.raises(ValueError): - skew.plot_barbs(p_wind, u, v, plot_units='knots') + with autoclose_figure(figsize=(9, 9)) as fig: + skew = SkewT(fig, aspect='auto') + with 
pytest.raises(ValueError): + skew.plot_barbs(p_wind, u, v, plot_units='knots') @pytest.mark.mpl_image_compare(tolerance=0, remove_text=True) @@ -571,6 +573,6 @@ def test_hodograph_wind_vectors(): def test_hodograph_range_with_units(): """Tests making a hodograph with a range with units.""" - fig = plt.figure(figsize=(6, 6)) - ax = fig.add_subplot(1, 1, 1) - Hodograph(ax, component_range=60. * units.knots) + with autoclose_figure(figsize=(6, 6)) as fig: + ax = fig.add_subplot(1, 1, 1) + Hodograph(ax, component_range=60. * units.knots) diff --git a/tests/plots/test_station_plot.py b/tests/plots/test_station_plot.py index 80012bc9ab0..8873b8cbc55 100644 --- a/tests/plots/test_station_plot.py +++ b/tests/plots/test_station_plot.py @@ -11,6 +11,7 @@ from metpy.plots import (current_weather, high_clouds, nws_layout, simple_layout, sky_cover, StationPlot, StationPlotLayout) +from metpy.testing import autoclose_figure from metpy.units import units @@ -145,8 +146,6 @@ def test_stationlayout_api(): def test_station_layout_odd_data(): """Test more corner cases with data passed in.""" - fig = plt.figure(figsize=(9, 9)) - # Set up test layout layout = StationPlotLayout() layout.add_barb('u', 'v') @@ -156,9 +155,9 @@ def test_station_layout_odd_data(): data = {'temperature': [25.]} # Make the plot - sp = StationPlot(fig.add_subplot(1, 1, 1), [1], [2], fontsize=12) - layout.plot(sp, data) - assert True + with autoclose_figure(figsize=(9, 9)) as fig: + sp = StationPlot(fig.add_subplot(1, 1, 1), [1], [2], fontsize=12) + layout.plot(sp, data) def test_station_layout_replace(): @@ -333,22 +332,22 @@ def test_barb_projection_list(wind_projection_list): """Test that barbs will be projected when lat/lon lists are provided.""" lat, lon, u, v = wind_projection_list - fig = plt.figure() - ax = fig.add_subplot(1, 1, 1) - stnplot = StationPlot(ax, lon, lat) - stnplot.plot_barb(u, v) - assert stnplot.barbs + with autoclose_figure() as fig: + ax = fig.add_subplot(1, 1, 1) + stnplot = 
StationPlot(ax, lon, lat) + stnplot.plot_barb(u, v) + assert stnplot.barbs def test_arrow_projection_list(wind_projection_list): """Test that arrows will be projected when lat/lon lists are provided.""" lat, lon, u, v = wind_projection_list - fig = plt.figure() - ax = fig.add_subplot(1, 1, 1) - stnplot = StationPlot(ax, lon, lat) - stnplot.plot_arrow(u, v) - assert stnplot.arrows + with autoclose_figure() as fig: + ax = fig.add_subplot(1, 1, 1) + stnplot = StationPlot(ax, lon, lat) + stnplot.plot_arrow(u, v) + assert stnplot.arrows @pytest.fixture @@ -416,11 +415,11 @@ def test_barb_unit_conversion_exception(u, v): x_pos = np.array([0]) y_pos = np.array([0]) - fig = plt.figure() - ax = fig.add_subplot(1, 1, 1) - stnplot = StationPlot(ax, x_pos, y_pos) - with pytest.raises(ValueError): - stnplot.plot_barb(u, v, plot_units='knots') + with autoclose_figure() as fig: + ax = fig.add_subplot(1, 1, 1) + stnplot = StationPlot(ax, x_pos, y_pos) + with pytest.raises(ValueError): + stnplot.plot_barb(u, v, plot_units='knots') @pytest.mark.mpl_image_compare(tolerance=0.021, savefig_kwargs={'dpi': 300}, remove_text=True) @@ -467,8 +466,8 @@ def test_scalar_unit_conversion_exception(): x_pos = np.array([0]) y_pos = np.array([0]) - fig = plt.figure() - ax = fig.add_subplot(1, 1, 1) - stnplot = StationPlot(ax, x_pos, y_pos) - with pytest.raises(ValueError): - stnplot.plot_parameter('C', 50, plot_units='degC') + with autoclose_figure() as fig: + ax = fig.add_subplot(1, 1, 1) + stnplot = StationPlot(ax, x_pos, y_pos) + with pytest.raises(ValueError): + stnplot.plot_parameter('C', 50, plot_units='degC') diff --git a/tests/plots/test_util.py b/tests/plots/test_util.py index 6d77ae33ef9..96b366cd878 100644 --- a/tests/plots/test_util.py +++ b/tests/plots/test_util.py @@ -11,7 +11,7 @@ import xarray as xr from metpy.plots import add_metpy_logo, add_timestamp, add_unidata_logo, convert_gempak_color -from metpy.testing import get_test_data, version_check +from metpy.testing import 
autoclose_figure, get_test_data, version_check @pytest.mark.mpl_image_compare(tolerance=2.638, remove_text=True) @@ -52,12 +52,12 @@ def test_add_timestamp_high_contrast(): def test_add_timestamp_xarray(): """Test that add_timestamp can work with xarray datetime accessor.""" - fig = plt.figure() - ax = fig.add_subplot(1, 1, 1) - ds = xr.open_dataset(get_test_data('AK-REGIONAL_8km_3.9_20160408_1445.gini'), - engine='gini') - txt = add_timestamp(ax, ds.time.dt, pretext='') - assert txt.get_text() == '2016-04-08T14:45:20Z' + with autoclose_figure() as fig: + ax = fig.add_subplot(1, 1, 1) + ds = xr.open_dataset(get_test_data('AK-REGIONAL_8km_3.9_20160408_1445.gini'), + engine='gini') + txt = add_timestamp(ax, ds.time.dt, pretext='') + assert txt.get_text() == '2016-04-08T14:45:20Z' @pytest.mark.mpl_image_compare(tolerance=0.004, remove_text=True) @@ -86,8 +86,7 @@ def test_add_unidata_logo(): def test_add_logo_invalid_size(): """Test adding a logo to a figure with an invalid size specification.""" - fig = plt.figure(figsize=(9, 9)) - with pytest.raises(ValueError): + with pytest.raises(ValueError), autoclose_figure(figsize=(9, 9)) as fig: add_metpy_logo(fig, size='jumbo') From 50dc4252f25038d427849a91f7df640034e46189 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Fri, 3 Nov 2023 15:42:09 -0600 Subject: [PATCH 20/36] MNT: Avoid Matplotlib 3.8 deprecation warning Matplotlib 3.8 deprecated the collections attribute on ContourSet. 
--- src/metpy/plots/declarative.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/metpy/plots/declarative.py b/src/metpy/plots/declarative.py index 182b7ef252e..6a62cc3be13 100644 --- a/src/metpy/plots/declarative.py +++ b/src/metpy/plots/declarative.py @@ -10,6 +10,7 @@ from itertools import cycle import re +from matplotlib.contour import ContourSet import matplotlib.patheffects as patheffects import matplotlib.pyplot as plt import numpy as np @@ -683,7 +684,10 @@ def clear(self): """ if getattr(self, 'handle', None) is not None: - if getattr(self.handle, 'collections', None) is not None: + # In matplotlib 3.8, the collections attribute on ContourSet was deprecated. + # Check for that here so we can avoid the deprecation warning. + if (not isinstance(ContourSet.__dict__.get('collections'), property) + and getattr(self.handle, 'collections', None) is not None): self.clear_collections() else: self.clear_handle() From ddf37b5274235c6f2b8e091b6716e9a46d483574 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Fri, 3 Nov 2023 15:46:21 -0600 Subject: [PATCH 21/36] TST: Add missing warning expectation in test --- tests/calc/test_thermo.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/calc/test_thermo.py b/tests/calc/test_thermo.py index 3c94e5caded..4f6fa049a4b 100644 --- a/tests/calc/test_thermo.py +++ b/tests/calc/test_thermo.py @@ -2526,7 +2526,8 @@ def test_parcel_profile_with_lcl_as_dataset_duplicates(): } ) - profile = parcel_profile_with_lcl_as_dataset(pressure, temperature, dewpoint) + with pytest.warns(UserWarning, match='Duplicate pressure'): + profile = parcel_profile_with_lcl_as_dataset(pressure, temperature, dewpoint) xr.testing.assert_allclose(profile, truth, atol=1e-5) From a12403ed2995bf80649ea5c26b0b1c51ea3e893b Mon Sep 17 00:00:00 2001 From: Ryan May Date: Fri, 3 Nov 2023 16:11:31 -0600 Subject: [PATCH 22/36] MNT: Avoid some UnitStrippedWarnings Avoid passing Quantity instances to matplotlib plots that 
don't support them in the declarative interface. --- src/metpy/plots/declarative.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/metpy/plots/declarative.py b/src/metpy/plots/declarative.py index 6a62cc3be13..7dd31a9fc46 100644 --- a/src/metpy/plots/declarative.py +++ b/src/metpy/plots/declarative.py @@ -1066,7 +1066,8 @@ def _build(self): kwargs.setdefault('colors', self.linecolor) kwargs.setdefault('linestyles', self.linestyle) - self.handle = self.parent.ax.contour(x_like, y_like, imdata, self.contours, **kwargs) + self.handle = self.parent.ax.contour(x_like, y_like, imdata.metpy.dequantify(), + self.contours, **kwargs) if self.clabels: self.handle.clabel(inline=1, fmt='%.0f', inline_spacing=8, use_clabeltext=True, fontsize=self.label_fontsize) @@ -1207,11 +1208,13 @@ def griddata(self): if self.plot_units is not None: data_subset_u = data_subset_u.metpy.convert_units(self.plot_units) + data_subset_u = data_subset_u.metpy.dequantify() data_subset_v = data_subset_v.metpy.convert_units(self.plot_units) + data_subset_v = data_subset_v.metpy.dequantify() self._griddata_u = data_subset_u * self.scale self._griddata_v = data_subset_v * self.scale - return (self._griddata_u, self._griddata_v) + return self._griddata_u, self._griddata_v @property def plotdata(self): From e6a8227604f06f50e5bcd3c86108ab0ef083f4ab Mon Sep 17 00:00:00 2001 From: Ryan May Date: Mon, 6 Nov 2023 17:20:46 -0700 Subject: [PATCH 23/36] MNT: Use tz-aware datetime methods Python 3.12 deprecated UTC tz-naive methods utcnow() and utcfromtimestamp(). This adds a filter for one import-time warning from dateutil. Our approach here maintains using tz-naive objects afterwards for user-accessible objects. 
--- docs/conf.py | 4 ++-- examples/meteogram_metpy.py | 2 +- pyproject.toml | 7 ++++++- src/metpy/io/nexrad.py | 5 +++-- src/metpy/io/text.py | 4 ++-- src/metpy/plots/_util.py | 6 +++--- 6 files changed, 17 insertions(+), 11 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 1683f016ba6..e09dcb2f571 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -9,7 +9,7 @@ # All configuration values have a default; values that are commented out # serve to show the default. -from datetime import datetime +from datetime import datetime, timezone import inspect import os from pathlib import Path @@ -129,7 +129,7 @@ # The encoding of source files. # source_encoding = 'utf-8-sig' -cur_date = datetime.utcnow() +cur_date = datetime.now(timezone.utc) # The main toctree document. master_doc = 'index' diff --git a/examples/meteogram_metpy.py b/examples/meteogram_metpy.py index 0dd259467a5..e0bff215de3 100644 --- a/examples/meteogram_metpy.py +++ b/examples/meteogram_metpy.py @@ -46,7 +46,7 @@ def __init__(self, fig, dates, probeid, time=None, axis=0): axis: number that controls the new axis to be plotted (FOR FUTURE) """ if not time: - time = dt.datetime.utcnow() + time = dt.datetime.now(dt.timezone.utc) self.start = dates[0] self.fig = fig self.end = dates[-1] diff --git a/pyproject.toml b/pyproject.toml index 91c134acd83..e067e01be22 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -103,7 +103,12 @@ norecursedirs = "build docs .idea" doctest_optionflags = "NORMALIZE_WHITESPACE" mpl-results-path = "test_output" xfail_strict = true -filterwarnings = ["ignore:numpy.ndarray size changed:RuntimeWarning"] +filterwarnings = [ + "ignore:numpy.ndarray size changed:RuntimeWarning", + # To be removed in the next python-dateutil release. 
+ # See: https://github.com/dateutil/dateutil/issues/1314 + 'ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:dateutil.tz.tz:37' +] [tool.ruff] line-length = 95 diff --git a/src/metpy/io/nexrad.py b/src/metpy/io/nexrad.py index ff76a1dae9b..f49b3615083 100644 --- a/src/metpy/io/nexrad.py +++ b/src/metpy/io/nexrad.py @@ -6,7 +6,7 @@ import bz2 from collections import defaultdict, namedtuple, OrderedDict import contextlib -import datetime +from datetime import datetime, timezone import logging import pathlib import re @@ -75,7 +75,8 @@ def bzip_blocks_decompress_all(data): def nexrad_to_datetime(julian_date, ms_midnight): """Convert NEXRAD date time format to python `datetime.datetime`.""" # Subtracting one from julian_date is because epoch date is 1 - return datetime.datetime.utcfromtimestamp((julian_date - 1) * day + ms_midnight * milli) + return datetime.fromtimestamp((julian_date - 1) * day + ms_midnight * milli, + tz=timezone.utc).replace(tzinfo=None) def remap_status(val): diff --git a/src/metpy/io/text.py b/src/metpy/io/text.py index 9ff480190da..9af16a99c11 100644 --- a/src/metpy/io/text.py +++ b/src/metpy/io/text.py @@ -4,7 +4,7 @@ """Support reading information from various text file formats.""" import contextlib -from datetime import datetime +from datetime import datetime, timezone import re import string @@ -95,7 +95,7 @@ def parse_wpc_surface_bulletin(bulletin, year=None): text = file.read().decode('utf-8') parsed_text = [] - valid_time = datetime.utcnow() + valid_time = datetime.now(timezone.utc).replace(tzinfo=None) for parts in _regroup_lines(text.splitlines()): # A single file may have multiple sets of data that are valid at different times. 
Set # the valid_time string that will correspond to all the following lines parsed, until diff --git a/src/metpy/plots/_util.py b/src/metpy/plots/_util.py index 1135d538e65..5992dc4f8c4 100644 --- a/src/metpy/plots/_util.py +++ b/src/metpy/plots/_util.py @@ -3,7 +3,7 @@ # SPDX-License-Identifier: BSD-3-Clause """Utilities for use in making plots.""" -from datetime import datetime +from datetime import datetime, timezone from matplotlib.collections import LineCollection import matplotlib.patheffects as mpatheffects @@ -24,7 +24,7 @@ def add_timestamp(ax, time=None, x=0.99, y=-0.04, ha='right', high_contrast=Fals ax : `matplotlib.axes.Axes` The `Axes` instance used for plotting time : `datetime.datetime` (or any object with a compatible ``strftime`` method) - Specific time to be plotted - datetime.utcnow will be use if not specified + Specific time to be plotted - ``datetime.now(UTC)`` will be use if not specified x : float Relative x position on the axes of the timestamp y : float @@ -52,7 +52,7 @@ def add_timestamp(ax, time=None, x=0.99, y=-0.04, ha='right', high_contrast=Fals text_args = {} text_args.update(**kwargs) if not time: - time = datetime.utcnow() + time = datetime.now(timezone.utc) timestr = time.strftime(time_format) # If we don't have a time string after that, assume xarray/numpy and see if item if not isinstance(timestr, str): From 9456d1dcbd599723ec91a2856d8d995963fe451b Mon Sep 17 00:00:00 2001 From: Ryan May Date: Wed, 8 Nov 2023 14:08:15 -0700 Subject: [PATCH 24/36] MNT: Avoid some unit stripped warnings from cross-section --- src/metpy/calc/cross_sections.py | 4 ++-- src/metpy/interpolate/slices.py | 8 ++------ tests/calc/test_indices.py | 2 +- 3 files changed, 5 insertions(+), 9 deletions(-) diff --git a/src/metpy/calc/cross_sections.py b/src/metpy/calc/cross_sections.py index acc8bba28a5..1b33330a389 100644 --- a/src/metpy/calc/cross_sections.py +++ b/src/metpy/calc/cross_sections.py @@ -123,8 +123,8 @@ def 
unit_vectors_from_cross_section(cross, index='index'): """ x, y = distances_from_cross_section(cross) - dx_di = first_derivative(x, axis=index).values - dy_di = first_derivative(y, axis=index).values + dx_di = first_derivative(x, axis=index).data + dy_di = first_derivative(y, axis=index).data tangent_vector_mag = np.hypot(dx_di, dy_di) unit_tangent_vector = np.vstack([dx_di / tangent_vector_mag, dy_di / tangent_vector_mag]) unit_normal_vector = np.vstack([-dy_di / tangent_vector_mag, dx_di / tangent_vector_mag]) diff --git a/src/metpy/interpolate/slices.py b/src/metpy/interpolate/slices.py index 33f390c5298..b11199565e7 100644 --- a/src/metpy/interpolate/slices.py +++ b/src/metpy/interpolate/slices.py @@ -7,7 +7,6 @@ import xarray as xr from ..package_tools import Exporter -from ..units import is_quantity, units from ..xarray import check_axis exporter = Exporter(globals()) @@ -50,17 +49,14 @@ def interpolate_to_slice(data, points, interp_type='linear'): 'your data has been parsed by MetPy with proper x and y ' 'dimension coordinates.') from None + data = data.metpy.dequantify() data_sliced = data.interp({ x.name: xr.DataArray(points[:, 0], dims='index', attrs=x.attrs), y.name: xr.DataArray(points[:, 1], dims='index', attrs=y.attrs) }, method=interp_type) data_sliced.coords['index'] = range(len(points)) - # Bug in xarray: interp strips units - if is_quantity(data.data) and not is_quantity(data_sliced.data): - data_sliced.data = units.Quantity(data_sliced.data, data.data.units) - - return data_sliced + return data_sliced.metpy.quantify() @exporter.export diff --git a/tests/calc/test_indices.py b/tests/calc/test_indices.py index 1a5ab52ebcd..0a2982a65f9 100644 --- a/tests/calc/test_indices.py +++ b/tests/calc/test_indices.py @@ -158,7 +158,7 @@ def test_weighted_continuous_average_elevated(): def test_precipitable_water_xarray(): """Test precipitable water with xarray input.""" data = get_upper_air_data(datetime(2016, 5, 22, 0), 'DDC') - press = 
xr.DataArray(data['pressure'], attrs={'units': str(data['pressure'].units)}) + press = xr.DataArray(data['pressure'].m, attrs={'units': str(data['pressure'].units)}) dewp = xr.DataArray(data['dewpoint'], dims=('press',), coords=(press,)) pw = precipitable_water(press, dewp, top=400 * units.hPa) truth = 22.60430651 * units.millimeters From e74ca9c0d8b078c31aa4295404366f664a98065a Mon Sep 17 00:00:00 2001 From: Ryan May Date: Wed, 8 Nov 2023 15:31:46 -0700 Subject: [PATCH 25/36] MNT: Adjust image thresholds This allows the tests to pass properly on a "minimum" config on my mac M2. --- tests/plots/test_declarative.py | 8 ++++---- tests/plots/test_station_plot.py | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/plots/test_declarative.py b/tests/plots/test_declarative.py index b06ef8a8da1..407736a25bd 100644 --- a/tests/plots/test_declarative.py +++ b/tests/plots/test_declarative.py @@ -840,7 +840,7 @@ def test_latlon(): return pc.figure -@pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.343) +@pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.393) @needs_cartopy def test_declarative_barb_options(): """Test making a contour plot.""" @@ -962,7 +962,7 @@ def test_declarative_arrow_changes(): return pc.figure -@pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.86) +@pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.891) @needs_cartopy def test_declarative_barb_earth_relative(): """Test making a contour plot.""" @@ -1520,7 +1520,7 @@ def test_declarative_sfc_obs_full(ccrs, pandas_sfc): return pc.figure -@pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.355) +@pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.522) @needs_cartopy def test_declarative_upa_obs(): """Test making a full upperair observation plot.""" @@ -1557,7 +1557,7 @@ def test_declarative_upa_obs(): return pc.figure -@pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.473) 
+@pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.518) @needs_cartopy def test_declarative_upa_obs_convert_barb_units(): """Test making a full upperair observation plot with barbs converting units.""" diff --git a/tests/plots/test_station_plot.py b/tests/plots/test_station_plot.py index 8873b8cbc55..54dde8dc52d 100644 --- a/tests/plots/test_station_plot.py +++ b/tests/plots/test_station_plot.py @@ -287,7 +287,7 @@ def wind_plot(): return u, v, x, y -@pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.499) +@pytest.mark.mpl_image_compare(remove_text=True, tolerance=0.5) def test_barb_projection(wind_plot, ccrs): """Test that barbs are properly projected (#598).""" u, v, x, y = wind_plot From 94312575f6005b526e276318e57ff20e33eb1485 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Thu, 9 Nov 2023 17:26:39 -0700 Subject: [PATCH 26/36] TST: Silence some invalid value warnings These are inevitable when you do mod/remainder on a nan, but we're perfectly happy to get nan back. 
--- tests/calc/test_calc_tools.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/calc/test_calc_tools.py b/tests/calc/test_calc_tools.py index 6f5b8a3c28a..b351372bf32 100644 --- a/tests/calc/test_calc_tools.py +++ b/tests/calc/test_calc_tools.py @@ -884,6 +884,7 @@ def test_angle_to_direction_full(): assert_array_equal(output_dirs, expected_dirs) +@pytest.mark.filterwarnings('ignore:invalid value encountered in remainder:RuntimeWarning') def test_angle_to_direction_invalid_scalar(): """Test invalid angle.""" expected_dirs = UND assert_array_equal(output_dirs, expected_dirs) +@pytest.mark.filterwarnings('ignore:invalid value encountered in remainder:RuntimeWarning') def test_angle_to_direction_invalid_arr(): """Test array of invalid angles.""" expected_dirs = ['NE', UND, UND, UND, 'N'] From 3bd66a02064284a5f97ca0b89bcd678592b108eb Mon Sep 17 00:00:00 2001 From: Ryan May Date: Thu, 30 Nov 2023 12:05:03 -0700 Subject: [PATCH 27/36] TST: Filter some unavoidable numeric warnings These are a natural consequence of masked data/trying to trigger error conditions.
--- tests/calc/test_basic.py | 1 + tests/calc/test_thermo.py | 3 +++ 2 files changed, 4 insertions(+) diff --git a/tests/calc/test_basic.py b/tests/calc/test_basic.py index abc1f6ba9ec..c685f69e1fb 100644 --- a/tests/calc/test_basic.py +++ b/tests/calc/test_basic.py @@ -800,6 +800,7 @@ def test_altimiter_to_sea_level_pressure_inhg(): assert_almost_equal(res, truth, 3) +@pytest.mark.filterwarnings('ignore:overflow encountered in exp:RuntimeWarning') def test_altimeter_to_sea_level_pressure_hpa(array_type): """Test the altimeter to sea level pressure function with hectopascals.""" mask = [False, True, False, True] diff --git a/tests/calc/test_thermo.py b/tests/calc/test_thermo.py index 4f6fa049a4b..683283f2f14 100644 --- a/tests/calc/test_thermo.py +++ b/tests/calc/test_thermo.py @@ -203,6 +203,9 @@ def test_moist_lapse_starting_points(start, direction): @pytest.mark.xfail(platform.machine() == 'arm64', reason='ValueError is not raised on Mac M2') @pytest.mark.xfail((sys.platform == 'win32') and version_check('scipy<1.11.3'), reason='solve_ivp() does not error on Windows + SciPy < 1.11.3') +@pytest.mark.filterwarnings('ignore:overflow encountered in exp:RuntimeWarning') +@pytest.mark.filterwarnings(r'ignore:invalid value encountered in \w*divide:RuntimeWarning') +@pytest.mark.filterwarnings(r'ignore:.*Excess accuracy requested.*:UserWarning') def test_moist_lapse_failure(): """Test moist_lapse under conditions that cause the ODE solver to fail.""" p = np.logspace(3, -1, 10) * units.hPa From 08e347cc4817bcf22702feb145b849c558692661 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Thu, 30 Nov 2023 12:41:28 -0700 Subject: [PATCH 28/36] TST: Adjust test values This avoids needing to interpolate an LCL below our bottom pressure. 
--- tests/calc/test_thermo.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/calc/test_thermo.py b/tests/calc/test_thermo.py index 683283f2f14..a5c66a8fc45 100644 --- a/tests/calc/test_thermo.py +++ b/tests/calc/test_thermo.py @@ -1576,9 +1576,9 @@ def test_mixed_layer_cape_cin_bottom_pressure(multiple_intersections): """Test the calculation of mixed layer cape/cin with a specified bottom pressure.""" pressure, temperature, dewpoint = multiple_intersections mlcape_middle, mlcin_middle = mixed_layer_cape_cin(pressure, temperature, dewpoint, - parcel_start_pressure=800 * units.hPa) - assert_almost_equal(mlcape_middle, 0 * units('joule / kilogram'), 2) - assert_almost_equal(mlcin_middle, 0 * units('joule / kilogram'), 2) + parcel_start_pressure=903 * units.hPa) + assert_almost_equal(mlcape_middle, 1177.86 * units('joule / kilogram'), 2) + assert_almost_equal(mlcin_middle, -37. * units('joule / kilogram'), 2) def test_dcape(): From afe1ff83d093a2dc651e2f38d27f0d95282e367b Mon Sep 17 00:00:00 2001 From: Ryan May Date: Thu, 30 Nov 2023 14:27:58 -0700 Subject: [PATCH 29/36] TST: Filter numpy deprecations triggered by pint --- pyproject.toml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index e067e01be22..75546ee7998 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -107,7 +107,9 @@ filterwarnings = [ "ignore:numpy.ndarray size changed:RuntimeWarning", # To be removed in the next python-dateutil release. 
# See: https://github.com/dateutil/dateutil/issues/1314 - 'ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:dateutil.tz.tz:37' + 'ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:dateutil.tz.tz:37', + # Numpy deprecation triggered by Pint: https://github.com/hgrecco/pint/pull/1880 + "ignore:Conversion of an array with ndim > 0 to a scalar is deprecated:DeprecationWarning:pint.facets.plain.quantity:575" ] [tool.ruff] From aae1672fcf44cc1165600afc3d6180c668c6dc1e Mon Sep 17 00:00:00 2001 From: Ryan May Date: Thu, 30 Nov 2023 14:29:07 -0700 Subject: [PATCH 30/36] MNT: Fix typo --- src/metpy/calc/basic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/metpy/calc/basic.py b/src/metpy/calc/basic.py index bd4d096d789..53dd629c465 100644 --- a/src/metpy/calc/basic.py +++ b/src/metpy/calc/basic.py @@ -93,7 +93,7 @@ def wind_direction(u, v, convention='from'): origshape = wdir.shape wdir = np.atleast_1d(wdir) - # Handle oceanographic convection + # Handle oceanographic convention if convention == 'to': wdir -= units.Quantity(180., 'deg') elif convention not in ('to', 'from'): From 81ce986bd9392f1aca67673e1a2cfeee9f32a124 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Thu, 30 Nov 2023 14:48:32 -0700 Subject: [PATCH 31/36] TST: Filter numpy deprecation warnings from pyproj These aren't planned to be addressed until well after numpy 2.0 is released. 
--- pyproject.toml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 75546ee7998..d48e96a37a3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -109,7 +109,10 @@ filterwarnings = [ # See: https://github.com/dateutil/dateutil/issues/1314 'ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:dateutil.tz.tz:37', # Numpy deprecation triggered by Pint: https://github.com/hgrecco/pint/pull/1880 - "ignore:Conversion of an array with ndim > 0 to a scalar is deprecated:DeprecationWarning:pint.facets.plain.quantity:575" + "ignore:Conversion of an array with ndim > 0 to a scalar is deprecated:DeprecationWarning:pint.facets.plain.quantity:575", + # PyProj automatically dispatching for single point, will be waiting for NumPy 2.0 to address + # See: https://github.com/pyproj4/pyproj/issues/1309 + "ignore:Conversion of an array with ndim > 0 to a scalar is deprecated:DeprecationWarning:pyproj.geod:404" ] [tool.ruff] From 4b6e58c789f54b65dde12daace367c2dbd501cd9 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Wed, 13 Dec 2023 14:26:21 -0700 Subject: [PATCH 32/36] TST: Avoid some warnings about unclosed files Most of this is fixed by passing filenames, rather than file objects, to pandas/numpy so that they have ownership. 
--- tests/io/test_gempak.py | 26 +++++++++++++------------- tests/io/test_nexrad.py | 9 ++++++++- 2 files changed, 21 insertions(+), 14 deletions(-) diff --git a/tests/io/test_gempak.py b/tests/io/test_gempak.py index 25b7105a6de..92de1a34456 100644 --- a/tests/io/test_gempak.py +++ b/tests/io/test_gempak.py @@ -26,7 +26,8 @@ def test_grid_loading(grid_name): ) gio = grid[0].values.squeeze() - gempak = np.load(get_test_data(f'gem_packing_{grid_name}.npz'))['values'] + gempak = np.load(get_test_data(f'gem_packing_{grid_name}.npz', + as_file_obj=False))['values'] assert_allclose(gio, gempak, rtol=1e-6, atol=0) @@ -56,7 +57,8 @@ def test_merged_sounding(): gimxr = gso[0].imxr.values.squeeze() gdtar = gso[0].dtar.values.squeeze() - gempak = pd.read_csv(get_test_data('gem_model_mrg.csv'), na_values=-9999) + gempak = pd.read_csv(get_test_data('gem_model_mrg.csv', as_file_obj=False), + na_values=-9999) dpres = gempak.PRES.values dtemp = gempak.TMPC.values ddwpt = gempak.DWPC.values @@ -111,7 +113,7 @@ def test_unmerged_sounding(gem, gio, station): gsped = gso[0].sped.values.squeeze() ghght = gso[0].hght.values.squeeze() - gempak = pd.read_csv(get_test_data(f'{gem}'), na_values=-9999) + gempak = pd.read_csv(get_test_data(f'{gem}', as_file_obj=False), na_values=-9999) dpres = gempak.PRES.values dtemp = gempak.TEMP.values ddwpt = gempak.DWPT.values @@ -141,7 +143,8 @@ def test_unmerged_sigw_pressure_sounding(): gsped = gso[0].sped.values.squeeze() ghght = gso[0].hght.values.squeeze() - gempak = pd.read_csv(get_test_data('gem_sigw_pres_unmrg_man_bgl.csv'), na_values=-9999) + gempak = pd.read_csv(get_test_data('gem_sigw_pres_unmrg_man_bgl.csv', as_file_obj=False), + na_values=-9999) dpres = gempak.PRES.values dtemp = gempak.TEMP.values ddwpt = gempak.DWPT.values @@ -164,7 +167,7 @@ def test_standard_surface(): gsf = GempakSurface(get_test_data('gem_std.sfc')) gstns = gsf.sfjson() - gempak = pd.read_csv(get_test_data('gem_std.csv')) + gempak = 
pd.read_csv(get_test_data('gem_std.csv', as_file_obj=False)) gempak['YYMMDD/HHMM'] = pd.to_datetime(gempak['YYMMDD/HHMM'], format='%y%m%d/%H%M') gempak = gempak.set_index(['STN', 'YYMMDD/HHMM']) @@ -184,7 +187,7 @@ def test_ship_surface(): gsf = GempakSurface(get_test_data('gem_ship.sfc')) - gempak = pd.read_csv(get_test_data('gem_ship.csv')) + gempak = pd.read_csv(get_test_data('gem_ship.csv', as_file_obj=False)) gempak['YYMMDD/HHMM'] = pd.to_datetime(gempak['YYMMDD/HHMM'], format='%y%m%d/%H%M') gempak = gempak.set_index(['STN', 'YYMMDD/HHMM']) gempak.sort_index(inplace=True) @@ -212,7 +215,7 @@ def test_coordinates_creation(proj_type): decode_lat = grid.lat decode_lon = grid.lon - gempak = np.load(get_test_data(f'gem_{proj_type}.npz')) + gempak = np.load(get_test_data(f'gem_{proj_type}.npz', as_file_obj=False)) true_lat = gempak['lat'] true_lon = gempak['lon'] @@ -264,12 +267,10 @@ def test_date_parsing(): def test_surface_text(text_type, date_time): """Test text decoding of surface hourly and special observations.""" g = get_test_data('gem_surface_with_text.sfc') - d = get_test_data('gem_surface_with_text.csv') - gsf = GempakSurface(g) text = gsf.nearest_time(date_time, station_id='MSN')[0]['values'][text_type] - gempak = pd.read_csv(d) + gempak = pd.read_csv(get_test_data('gem_surface_with_text.csv', as_file_obj=False)) gem_text = gempak.loc[:, text_type.upper()][0] assert text == gem_text @@ -279,10 +280,9 @@ def test_surface_text(text_type, date_time): def test_sounding_text(text_type): """Test for proper decoding of coded message text.""" g = get_test_data('gem_unmerged_with_text.snd') - d = get_test_data('gem_unmerged_with_text.csv') - gso = GempakSounding(g).snxarray(station_id='OUN')[0] - gempak = pd.read_csv(d) + + gempak = pd.read_csv(get_test_data('gem_unmerged_with_text.csv', as_file_obj=False)) text = gso.attrs['WMO_CODES'][text_type] gem_text = gempak.loc[:, text_type.upper()][0] diff --git a/tests/io/test_nexrad.py b/tests/io/test_nexrad.py index 
2d227021ac9..46f0cf98863 100644 --- a/tests/io/test_nexrad.py +++ b/tests/io/test_nexrad.py @@ -72,8 +72,15 @@ def read(self, n=None): """Read bytes.""" return self._f.read(n) + def close(self): + """Close object.""" + return self._f.close() + f = SeeklessReader(f) - Level2File(f) + + # Need to close manually (since we own the fobj) to avoid a warning + with contextlib.closing(f): + Level2File(f) def test_doubled_file(): From 689969d49dc2c3271f722f4428c94df96ebd3f8b Mon Sep 17 00:00:00 2001 From: Ryan May Date: Wed, 13 Dec 2023 14:27:30 -0700 Subject: [PATCH 33/36] MNT: Set warnings to trigger an error in pytest --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index d48e96a37a3..9610628a611 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -104,6 +104,7 @@ doctest_optionflags = "NORMALIZE_WHITESPACE" mpl-results-path = "test_output" xfail_strict = true filterwarnings = [ + "error", "ignore:numpy.ndarray size changed:RuntimeWarning", # To be removed in the next python-dateutil release. # See: https://github.com/dateutil/dateutil/issues/1314 From cf1351feee13764b39fc9942a5368cfd37c2df2c Mon Sep 17 00:00:00 2001 From: Ryan May Date: Wed, 13 Dec 2023 14:45:16 -0700 Subject: [PATCH 34/36] CI: Update cartopy download helper Adds a few missing files that were triggering downloads in tests.
--- ci/download_cartopy_maps.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/ci/download_cartopy_maps.py b/ci/download_cartopy_maps.py index 68762ac0198..310eff218d7 100755 --- a/ci/download_cartopy_maps.py +++ b/ci/download_cartopy_maps.py @@ -21,9 +21,6 @@ def grab_ne(category, feature, res): for r in ['110m', '50m', '10m']: grab_ne('cultural', feat, r) - for feat, r in [('coastline', '10m'), ('coastline', '50m'), ('coastline', '110m'), - ('lakes', '10m'), ('lakes', '50m'), - ('land', '10m'), ('land', '50m'), ('land', '110m'), - ('ocean', '110m'), ('ocean', '50m'), - ('rivers_lake_centerlines', '10m'), ('rivers_lake_centerlines', '110m')]: - grab_ne('physical', feat, r) + for feat in ['coastline', 'lakes', 'land', 'ocean', 'rivers_lake_centerlines']: + for r in ['110m', '50m', '10m']: + grab_ne('physical', feat, r) From a83cfdd700913f7cf48d14b44927a1c4c1409ae9 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Wed, 13 Dec 2023 15:02:19 -0700 Subject: [PATCH 35/36] CI: Update pip/setuptools as part of setup This is to avoid a warning from setuptools_scm. --- .github/actions/install-pypi/action.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/actions/install-pypi/action.yml b/.github/actions/install-pypi/action.yml index abbda96c94e..db05ba94a75 100644 --- a/.github/actions/install-pypi/action.yml +++ b/.github/actions/install-pypi/action.yml @@ -44,6 +44,12 @@ runs: shell: bash run: echo "PIP_NO_BINARY=shapely" >> $GITHUB_ENV + - name: Upgrade pip and setuptools + shell: bash + run: | + python -m pip install --upgrade pip + python -m pip install --upgrade setuptools + - name: Set dependency groups for install shell: bash run: | From b3b2c3fea970b6a6edb2117c8b9f1ce596b5ef94 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Wed, 13 Dec 2023 15:24:17 -0700 Subject: [PATCH 36/36] CI: Update minimum supported xarray, pandas, and pint Moves to 0.21.0, 1.4.0, and 0.17, respectively. 
These are on the edge of our window but they help avoid deprecation warnings. --- pyproject.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9610628a611..3d688bb5d73 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,13 +28,13 @@ requires-python = ">=3.9" dependencies = [ "matplotlib>=3.5.0", "numpy>=1.20.0", - "pandas>=1.2.0", - "pint>=0.15", + "pandas>=1.4.0", + "pint>=0.17", "pooch>=1.2.0", "pyproj>=3.0.0", "scipy>=1.8.0", "traitlets>=5.0.5", - "xarray>=0.18.0" + "xarray>=0.21.0" ] [project.entry-points."xarray.backends"]