diff --git a/pysteps/io/importers.py b/pysteps/io/importers.py
index 1493b6d4b..9cfaa17f9 100644
--- a/pysteps/io/importers.py
+++ b/pysteps/io/importers.py
@@ -1304,10 +1304,13 @@ def import_odim_hdf5(filename, qty="RATE", **kwargs):
             # check if the "what" group is in the "dataset" group
             if "what" in list(dsg[1].keys()):
                 if "quantity" in dsg[1]["what"].attrs.keys():
-                    qty_, gain, offset, nodata, undetect = _read_opera_hdf5_what_group(
-                        dsg[1]["what"]
-                    )
-                    what_grp_found = True
+                    try:
+                        qty_, gain, offset, nodata, undetect = (
+                            _read_opera_hdf5_what_group(dsg[1]["what"])
+                        )
+                        what_grp_found = True
+                    except KeyError:
+                        pass
 
             for dg in dsg[1].items():
                 if dg[0][0:4] == "data":
@@ -1480,7 +1483,7 @@ def import_opera_hdf5(filename, qty="RATE", **kwargs):
 
 
 def _read_opera_hdf5_what_group(whatgrp):
-    qty = whatgrp.attrs["quantity"]
+    qty = whatgrp.attrs["quantity"] if "quantity" in whatgrp.attrs.keys() else b"QIND"
     gain = whatgrp.attrs["gain"] if "gain" in whatgrp.attrs.keys() else 1.0
     offset = whatgrp.attrs["offset"] if "offset" in whatgrp.attrs.keys() else 0.0
     nodata = whatgrp.attrs["nodata"] if "nodata" in whatgrp.attrs.keys() else np.nan
diff --git a/pysteps/tests/test_io_opera_hdf5.py b/pysteps/tests/test_io_opera_hdf5.py
index 6b2214d49..52f86f7b8 100644
--- a/pysteps/tests/test_io_opera_hdf5.py
+++ b/pysteps/tests/test_io_opera_hdf5.py
@@ -9,19 +9,57 @@
 
 pytest.importorskip("h5py")
 
+# tests for three OPERA products:
+# Odyssey rain rate composite (production discontinued on October 30th 2024)
+# CIRRUS max. reflectivity composites
+# NIMBUS rain rate and rain accumulation composites
 root_path = pysteps.rcparams.data_sources["opera"]["root_path"]
+
 filename = os.path.join(root_path, "20180824", "T_PAAH21_C_EUOC_20180824180000.hdf")
-precip, _, metadata = pysteps.io.import_opera_hdf5(filename)
+precip_odyssey, _, metadata_odyssey = pysteps.io.import_opera_hdf5(filename, qty="RATE")
+
+filename = os.path.join(
+    root_path, "20241126", "CIRRUS", "T_PABV21_C_EUOC_20241126010000.hdf"
+)
+precip_cirrus, _, metadata_cirrus = pysteps.io.import_opera_hdf5(filename, qty="DBZH")
+
+filename = os.path.join(
+    root_path, "20241126", "NIMBUS", "T_PAAH22_C_EUOC_20241126010000.hdf"
+)
+precip_nimbus_rain_rate, _, metadata_nimbus_rain_rate = pysteps.io.import_opera_hdf5(
+    filename, qty="RATE"
+)
+
+filename = os.path.join(
+    root_path, "20241126", "NIMBUS", "T_PASH22_C_EUOC_20241126010000.hdf"
+)
+precip_nimbus_rain_accum, _, metadata_nimbus_rain_accum = pysteps.io.import_opera_hdf5(
+    filename, qty="ACRR"
+)
 
 
-def test_io_import_opera_hdf5_shape():
+def test_io_import_opera_hdf5_odyssey_shape():
     """Test the importer OPERA HDF5."""
-    assert precip.shape == (2200, 1900)
+    assert precip_odyssey.shape == (2200, 1900)
 
 
-# test_metadata: list of (variable,expected, tolerance) tuples
+def test_io_import_opera_hdf5_cirrus_shape():
+    """Test the importer OPERA HDF5."""
+    assert precip_cirrus.shape == (4400, 3800)
+
+def test_io_import_opera_hdf5_nimbus_rain_rate_shape():
+    """Test the importer OPERA HDF5."""
+    assert precip_nimbus_rain_rate.shape == (2200, 1900)
+
+
+def test_io_import_opera_hdf5_nimbus_rain_accum_shape():
+    """Test the importer OPERA HDF5."""
+    assert precip_nimbus_rain_accum.shape == (2200, 1900)
+
+
+# test_metadata: list of (variable,expected, tolerance) tuples
 expected_proj = (
     "+proj=laea +lat_0=55.0 +lon_0=10.0 "
     "+x_0=1950000.0 "
     "+y_0=-2100000.0 "
     "+units=m +ellps=WGS84"
@@ -30,7 +68,7 @@ def test_io_import_opera_hdf5_shape():
 )
 
 # list of (variable,expected,tolerance) tuples
-test_attrs = [
+test_odyssey_attrs = [
     ("projection", expected_proj, None),
     ("ll_lon", -10.434576838640398, 1e-10),
     ("ll_lat", 31.746215319325056, 1e-10),
@@ -53,7 +91,101 @@ def test_io_import_opera_hdf5_shape():
 ]
 
 
-@pytest.mark.parametrize("variable, expected, tolerance", test_attrs)
-def test_io_import_mch_gif_dataset_attrs(variable, expected, tolerance):
+@pytest.mark.parametrize("variable, expected, tolerance", test_odyssey_attrs)
+def test_io_import_opera_hdf5_odyssey_dataset_attrs(variable, expected, tolerance):
     """Test the importer OPERA HDF5."""
-    smart_assert(metadata[variable], expected, tolerance)
+    smart_assert(metadata_odyssey[variable], expected, tolerance)
+
+
+# list of (variable,expected,tolerance) tuples
+test_cirrus_attrs = [
+    ("projection", expected_proj, None),
+    ("ll_lon", -10.4345768386404, 1e-10),
+    ("ll_lat", 31.7462153182675, 1e-10),
+    ("ur_lon", 57.8119647501499, 1e-10),
+    ("ur_lat", 67.6210371071631, 1e-10),
+    ("x1", -0.00027143326587975025, 1e-6),
+    ("y1", -4400000.00116988, 1e-10),
+    ("x2", 3800000.0000817003, 1e-10),
+    ("y2", -8.761277422308922e-05, 1e-6),
+    ("xpixelsize", 1000.0, 1e-10),
+    ("ypixelsize", 1000.0, 1e-10),
+    ("cartesian_unit", "m", None),
+    ("accutime", 15.0, 1e-10),
+    ("yorigin", "upper", None),
+    ("unit", "dBZ", None),
+    ("institution", "Odyssey datacentre", None),
+    ("transform", "dB", None),
+    ("zerovalue", -32.0, 1e-10),
+    ("threshold", -31.5, 1e-10),
+]
+
+
+@pytest.mark.parametrize("variable, expected, tolerance", test_cirrus_attrs)
+def test_io_import_opera_hdf5_cirrus_dataset_attrs(variable, expected, tolerance):
+    """Test OPERA HDF5 importer: max. reflectivity composites from CIRRUS."""
+    smart_assert(metadata_cirrus[variable], expected, tolerance)
+
+
+# list of (variable,expected,tolerance) tuples
+test_nimbus_rain_rate_attrs = [
+    ("projection", expected_proj, None),
+    ("ll_lon", -10.434599999137568, 1e-10),
+    ("ll_lat", 31.74619995126678, 1e-10),
+    ("ur_lon", 57.8119032106317, 1e-10),
+    ("ur_lat", 67.62104536996274, 1e-10),
+    ("x1", -2.5302714337594807, 1e-6),
+    ("y1", -4400001.031169886, 1e-10),
+    ("x2", 3799997.4700817037, 1e-10),
+    ("y2", -1.0300876162946224, 1e-6),
+    ("xpixelsize", 2000.0, 1e-10),
+    ("ypixelsize", 2000.0, 1e-10),
+    ("cartesian_unit", "m", None),
+    ("accutime", 15.0, 1e-10),
+    ("yorigin", "upper", None),
+    ("unit", "mm/h", None),
+    ("institution", "Odyssey datacentre", None),
+    ("transform", None, None),
+    ("zerovalue", 0.0, 1e-10),
+    ("threshold", 0.01, 1e-10),
+]
+
+
+@pytest.mark.parametrize("variable, expected, tolerance", test_nimbus_rain_rate_attrs)
+def test_io_import_opera_hdf5_nimbus_rain_rate_dataset_attrs(
+    variable, expected, tolerance
+):
+    """Test OPERA HDF5 importer: rain rate composites from NIMBUS."""
+    smart_assert(metadata_nimbus_rain_rate[variable], expected, tolerance)
+
+
+# list of (variable,expected,tolerance) tuples
+test_nimbus_rain_accum_attrs = [
+    ("projection", expected_proj, None),
+    ("ll_lon", -10.434599999137568, 1e-10),
+    ("ll_lat", 31.74619995126678, 1e-10),
+    ("ur_lon", 57.8119032106317, 1e-10),
+    ("ur_lat", 67.62104536996274, 1e-10),
+    ("x1", -2.5302714337594807, 1e-6),
+    ("y1", -4400001.031169886, 1e-10),
+    ("x2", 3799997.4700817037, 1e-10),
+    ("y2", -1.0300876162946224, 1e-6),
+    ("xpixelsize", 2000.0, 1e-10),
+    ("ypixelsize", 2000.0, 1e-10),
+    ("cartesian_unit", "m", None),
+    ("accutime", 15.0, 1e-10),
+    ("yorigin", "upper", None),
+    ("unit", "mm", None),
+    ("institution", "Odyssey datacentre", None),
+    ("transform", None, None),
+    ("zerovalue", 0.0, 1e-10),
+    ("threshold", 0.01, 1e-10),
+]
+
+
+@pytest.mark.parametrize("variable, expected, tolerance", test_nimbus_rain_accum_attrs)
tolerance", test_nimbus_rain_accum_attrs) +def test_io_import_opera_hdf5_nimbus_rain_accum_dataset_attrs( + variable, expected, tolerance +): + """Test OPERA HDF5 importer: rain accumulation composites from NIMBUS.""" + smart_assert(metadata_nimbus_rain_accum[variable], expected, tolerance)