Adapt reader mviri_l1b_fiduceo_nc #2802

Open
wants to merge 29 commits into base: main

Commits (29)
f27a91d
fix for file reading; includes removing chunk reading and decoding ti…
bkremmli May 17, 2024
a08d9bc
Merge branch 'read_error' of https://github.com/bkremmli/satpy into r…
bkremmli May 17, 2024
2dbae1a
remove import datetime
bkremmli May 17, 2024
eb0382a
add bkremmli to AUTHORS.md
bkremmli May 17, 2024
7d6608a
correct for failures from hook id ruff
bkremmli May 17, 2024
ec22136
minor adaptations from PR comments
bkremmli May 17, 2024
5beedea
Update satpy/readers/mviri_l1b_fiduceo_nc.py
bkremmli May 17, 2024
21679c6
perform chunking after open_dataset and use decode_cf = False
bkremmli May 24, 2024
a7cb10d
Merge branch 'read_error' of https://github.com/bkremmli/satpy into r…
bkremmli May 24, 2024
59880ce
decode times separately from other variables, adds TestInterpolator
bkremmli May 28, 2024
fb93f00
fixes _decode_cf() and tests
bkremmli Jun 4, 2024
951c9b0
adds test_fix_duplicate_dimensions and removes leftover dimensions "s…
bkremmli Jun 5, 2024
a379a08
Update mviri_l1b_fiduceo_nc.py
bkremmli Jun 5, 2024
5ec0cf9
adds support for filenames of MVIRI FCDR L1.5 release 2
bkremmli Jun 6, 2024
73acfb7
Merge pull request #1 from bkremmli/mviri_release2
bkremmli Jun 6, 2024
01856fd
Merge branch 'pytroll:main' into read_error
bkremmli Sep 3, 2024
c1cd334
sync/merge with fork diffs
bkremmli Sep 3, 2024
abf916e
adapt changes after xarray release 2024.7.0: include chunks with open…
bkremmli Sep 3, 2024
5822e50
removed chunks part from test_fix_duplicate_dimensions; adapted tests…
bkremmli Sep 3, 2024
f41e068
moved code _get_projection_longitude()
bkremmli Sep 4, 2024
6b079b1
fix test_fix_duplicate_dimensions
bkremmli Sep 4, 2024
fd099cd
- suppress warnings for renaming duplicate dimensions
bkremmli Sep 16, 2024
7c02d2d
define fill_val in test
bkremmli Sep 17, 2024
e90d7d0
ancillary changes like comments, etc.
bkremmli Sep 23, 2024
51d8e77
change fill_val definition in test, changes for Code Scene status
bkremmli Sep 24, 2024
89d9931
change fill_val to int64
bkremmli Sep 24, 2024
7d0ecef
include comments, create fixture_time_fake_dataset()
bkremmli Sep 24, 2024
11cf080
adapt fill_val definition
bkremmli Sep 24, 2024
03ec1fe
adapt fill_val definition
bkremmli Sep 24, 2024
1 change: 1 addition & 0 deletions AUTHORS.md
@@ -45,6 +45,7 @@ The following people have made contributions to this project:
- [Sauli Joro (sjoro)](https://github.com/sjoro)
- [Pouria Khalaj](https://github.com/pkhalaj)
- [Janne Kotro (jkotro)](https://github.com/jkotro)
- [Beke Kremmling (bkremmli)](https://github.com/bkremmli) - Deutscher Wetterdienst
- [Ralph Kuehn (ralphk11)](https://github.com/ralphk11)
- [Panu Lahtinen (pnuu)](https://github.com/pnuu)
- [Jussi Leinonen (jleinonen)](https://github.com/jleinonen) - meteoswiss
8 changes: 6 additions & 2 deletions satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml
@@ -20,14 +20,18 @@ file_types:
nc_easy:
file_reader: !!python/name:satpy.readers.mviri_l1b_fiduceo_nc.FiduceoMviriEasyFcdrFileHandler
file_patterns: [
'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_EASY_{processor_version}_{format_version}.nc'
'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_EASY_{processor_version}_{format_version}.nc',
# Example: FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc
'{sensor}_FCDR-EASY_{level}_{platform}-E{projection_longitude:s}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_{release}.nc'
# Example: MVIRI_FCDR-EASY_L15_MET7-E0000_200607060600_200607060630_0200.nc
]
nc_full:
file_reader: !!python/name:satpy.readers.mviri_l1b_fiduceo_nc.FiduceoMviriFullFcdrFileHandler
file_patterns: [
'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_FULL_{processor_version}_{format_version}.nc'
'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_FULL_{processor_version}_{format_version}.nc',
# Example: FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_FULL_v2.6_fv3.1.nc
'{sensor}_FCDR-FULL_{level}_{platform}-E{projection_longitude:s}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_{release}.nc'
# Example: MVIRI_FCDR-FULL_L15_MET7-E0000_200607060600_200607060630_0200.nc
]

datasets:
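
As a side note on the new release-2 pattern above: the projection longitude is encoded as an E-prefixed string in hundredths of a degree (e.g. "E5700" for 57.0 degrees east) rather than as a float. A minimal sketch of how such a filename resolves, assuming trollsift (the pattern library behind Satpy's file patterns) is available:

# Illustrative sketch, not part of the diff.
from trollsift import parse

pattern = ("{sensor}_FCDR-EASY_{level}_{platform}-E{projection_longitude:s}_"
           "{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_{release}.nc")
info = parse(pattern, "MVIRI_FCDR-EASY_L15_MET7-E5700_200607060600_200607060630_0200.nc")
# info["projection_longitude"] is the string "5700"; the reader divides by 100 to get 57.0.
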
54 changes: 48 additions & 6 deletions satpy/readers/mviri_l1b_fiduceo_nc.py
@@ -162,9 +162,9 @@

from satpy.readers._geos_area import get_area_definition, get_area_extent, sampling_to_lfac_cfac
from satpy.readers.file_handlers import BaseFileHandler
from satpy.utils import get_legacy_chunk_size
from satpy.utils import get_chunk_size_limit

CHUNK_SIZE = get_legacy_chunk_size()
CHUNK_SIZE = get_chunk_size_limit()
EQUATOR_RADIUS = 6378140.0
POLE_RADIUS = 6356755.0
ALTITUDE = 42164000.0 - EQUATOR_RADIUS
@@ -186,6 +186,8 @@
]
HIGH_RESOL = 2250

warnings.filterwarnings("ignore", message="^.*We do not yet support duplicate dimension names, but "
"we do allow initial construction of the object.*$")

class IRWVCalibrator:
"""Calibrate IR & WV channels."""
@@ -459,6 +461,10 @@
"""Wrap the given dataset."""
self.nc = nc

self._decode_cf()
self._fix_duplicate_dimensions(self.nc)


@property
def attrs(self):
"""Exposes dataset attributes."""
@@ -509,6 +515,31 @@
# satpy warnings.
ds.attrs.pop("ancillary_variables", None)

def _decode_cf(self):
"""Decode data."""
# time decoding with decode_cf results in error - decode separately!
time_dims, time = self._decode_time()
self.nc = self.nc.drop_vars(time.name)
self.nc = xr.decode_cf(self.nc)
self.nc[time.name] = (time_dims, time.values)

def _decode_time(self):
"""Decode time using fill value and offset."""
time = self.get_time()
time_dims = self.nc[time.name].dims
time = xr.where(time == time.attrs["_FillValue"], np.datetime64("NaT"),
(time + time.attrs["add_offset"]).astype("datetime64[s]").astype("datetime64[ns]"))

return (time_dims, time)
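
A rough sketch of what the decoding above amounts to, with made-up values: fill values become NaT, everything else is interpreted as seconds plus the add_offset and cast to datetime64.

# Illustrative sketch, not part of the diff.
import numpy as np
import xarray as xr

raw = xr.DataArray(
    np.array([[4294967295, 3600], [7200, 10800]], dtype=np.uint32),
    dims=("y_ir_wv", "x_ir_wv"),
    attrs={"_FillValue": np.uint32(4294967295), "add_offset": 0},
)
decoded = xr.where(raw == raw.attrs["_FillValue"], np.datetime64("NaT"),
                   (raw + raw.attrs["add_offset"]).astype("datetime64[s]").astype("datetime64[ns]"))
# decoded[0, 0] is NaT; decoded[0, 1] is 1970-01-01T01:00:00.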

def _fix_duplicate_dimensions(self, nc):
"""Rename dimensions as duplicate dimensions names are not supported by xarray."""
nc.variables["covariance_spectral_response_function_vis"].dims = ("srf_size_1", "srf_size_2")
self.nc = nc.drop_dims("srf_size")
nc.variables["channel_correlation_matrix_independent"].dims = ("channel_1", "channel_2")
nc.variables["channel_correlation_matrix_structured"].dims = ("channel_1", "channel_2")
self.nc = nc.drop_dims("channel")
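
Because xarray does not yet support duplicate dimension names (hence the warning filtered at the top of the module), the method above mutates each Variable's dims in place and then drops whatever still uses the old dimension. A minimal sketch of the same idea; "spectral_response_function_vis" is a hypothetical extra variable added only to show what drop_dims() removes:

# Illustrative sketch, not part of the diff.
import warnings
import xarray as xr

with warnings.catch_warnings():
    warnings.simplefilter("ignore")  # xarray warns about the duplicate dimension name
    nc = xr.Dataset({
        "covariance_spectral_response_function_vis": (("srf_size", "srf_size"), [[1, 2], [3, 4]]),
        "spectral_response_function_vis": ("srf_size", [0.5, 0.5]),  # hypothetical
    })
nc.variables["covariance_spectral_response_function_vis"].dims = ("srf_size_1", "srf_size_2")
nc = nc.drop_dims("srf_size")  # drops everything still on the old dimension name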

def get_time(self):
"""Get time coordinate.

@@ -558,13 +589,17 @@
chunks={"x": CHUNK_SIZE,
"y": CHUNK_SIZE,
"x_ir_wv": CHUNK_SIZE,
"y_ir_wv": CHUNK_SIZE}
"y_ir_wv": CHUNK_SIZE},
# see dataset wrapper for why decoding is disabled
decode_cf=False,
decode_times=False,
mask_and_scale=False,
)

self.nc = DatasetWrapper(nc_raw)

# Projection longitude is not provided in the file, read it from the
# filename.
self.projection_longitude = float(filename_info["projection_longitude"])
self.projection_longitude = self._get_projection_longitude(filename_info)

self.calib_coefs = self._get_calib_coefs()

self._get_angles = functools.lru_cache(maxsize=8)(
@@ -574,6 +609,13 @@
self._get_acq_time_uncached
)

def _get_projection_longitude(self, filename_info):
"""Read projection longitude from filename as it is not provided in the file."""
if "." in str(filename_info["projection_longitude"]):
return float(filename_info["projection_longitude"])

return float(filename_info["projection_longitude"]) / 100
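
Worked through with hypothetical filename_info dicts, the two filename conventions resolve to the same longitude:

# Illustrative sketch, not part of the diff (the dicts below are made up).
legacy_info = {"projection_longitude": 57.0}      # parsed with ":f" from ..._MET7-57.0_...
release2_info = {"projection_longitude": "5700"}  # parsed with ":s" from ..._MET7-E5700_...

for info in (legacy_info, release2_info):
    if "." in str(info["projection_longitude"]):
        lon = float(info["projection_longitude"])        # already degrees
    else:
        lon = float(info["projection_longitude"]) / 100  # hundredths of a degree
    assert lon == 57.0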

Codecov / codecov/patch check warning: added line satpy/readers/mviri_l1b_fiduceo_nc.py#L617 was not covered by tests.

def get_dataset(self, dataset_id, dataset_info):
"""Get the dataset."""
name = dataset_id["name"]
153 changes: 132 additions & 21 deletions satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py
@@ -36,13 +36,16 @@
DatasetWrapper,
FiduceoMviriEasyFcdrFileHandler,
FiduceoMviriFullFcdrFileHandler,
Interpolator,
)
from satpy.tests.utils import make_dataid

# NOTE:
# The following fixtures are not defined in this file, but are used and injected by Pytest:
# - request

fill_val = np.uint32(4294967295)  # FillValue defined to be Windows-compatible
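
For context (a note, not part of the diff): 4294967295 is the maximum of an unsigned 32-bit integer, so an explicit np.uint32 keeps the fill value independent of the platform default integer size (historically 32-bit on Windows):

import numpy as np
assert np.uint32(4294967295) == np.iinfo(np.uint32).max == 2**32 - 1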

attrs_exp: dict = {
"platform": "MET7",
"raw_metadata": {"foo": "bar"},
@@ -61,8 +64,8 @@
{"sun_earth_distance_correction_applied": True,
"sun_earth_distance_correction_factor": 1.}
)
acq_time_vis_exp = [np.datetime64("1970-01-01 00:30").astype("datetime64[ns]"),
np.datetime64("1970-01-01 00:30").astype("datetime64[ns]"),
acq_time_vis_exp = [np.datetime64("NaT").astype("datetime64[ns]"),
np.datetime64("NaT").astype("datetime64[ns]"),
np.datetime64("1970-01-01 02:30").astype("datetime64[ns]"),
np.datetime64("1970-01-01 02:30").astype("datetime64[ns]")]
vis_counts_exp = xr.DataArray(
@@ -79,6 +82,7 @@
},
attrs=attrs_exp
)

vis_rad_exp = xr.DataArray(
np.array(
[[np.nan, 18.56, 38.28, 58.],
@@ -124,7 +128,10 @@
},
attrs=attrs_exp
)
acq_time_ir_wv_exp = [np.datetime64("1970-01-01 00:30").astype("datetime64[ns]"),

u_struct_refl_exp = u_vis_refl_exp.copy()

acq_time_ir_wv_exp = [np.datetime64("NaT"),
np.datetime64("1970-01-01 02:30").astype("datetime64[ns]")]
wv_counts_exp = xr.DataArray(
np.array(
@@ -249,41 +256,48 @@
height=2
)

@pytest.fixture(name="time_fake_dataset")
def fixture_time_fake_dataset():
"""Create time for fake dataset."""
time = np.arange(4) * 60 * 60
time[0] = fill_val
time[1] = fill_val
time = time.reshape(2, 2)

return time

@pytest.fixture(name="fake_dataset")
def fixture_fake_dataset():
def fixture_fake_dataset(time_fake_dataset):
"""Create fake dataset."""
count_ir = da.linspace(0, 255, 4, dtype=np.uint8).reshape(2, 2)
count_wv = da.linspace(0, 255, 4, dtype=np.uint8).reshape(2, 2)
count_vis = da.linspace(0, 255, 16, dtype=np.uint8).reshape(4, 4)
sza = da.from_array(
np.array(
[[45, 90],
[0, 45]],
[[45, 90], [0, 45]],
dtype=np.float32
)
)
mask = da.from_array(
np.array(
[[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 1, 0], # 1 = "invalid"
[0, 0, 0, 0]],
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 0, 0]], # 1 = "invalid"
dtype=np.uint8
)
)
time = np.arange(4) * 60 * 60 * 1e9
time = time.astype("datetime64[ns]").reshape(2, 2)

cov = da.from_array([[1, 2], [3, 4]])

ds = xr.Dataset(
data_vars={
"count_vis": (("y", "x"), count_vis),
"count_wv": (("y_ir_wv", "x_ir_wv"), count_wv),
"count_ir": (("y_ir_wv", "x_ir_wv"), count_ir),
"toa_bidirectional_reflectance_vis": vis_refl_exp / 100,
"u_independent_toa_bidirectional_reflectance": u_vis_refl_exp / 100,
"u_structured_toa_bidirectional_reflectance": u_vis_refl_exp / 100,
"quality_pixel_bitmask": (("y", "x"), mask),
"solar_zenith_angle": (("y_tie", "x_tie"), sza),
"time_ir_wv": (("y_ir_wv", "x_ir_wv"), time),
"time_ir_wv": (("y_ir_wv", "x_ir_wv"), time_fake_dataset),
"a_ir": -5.0,
"b_ir": 1.0,
"bt_a_ir": 10.0,
@@ -303,20 +317,26 @@ def fixture_fake_dataset():
"sub_satellite_longitude_end": np.nan,
"sub_satellite_latitude_start": np.nan,
"sub_satellite_latitude_end": 0.1,
"covariance_spectral_response_function_vis": (("srf_size", "srf_size"), cov),
"channel_correlation_matrix_independent": (("channel", "channel"), cov),
"channel_correlation_matrix_structured": (("channel", "channel"), cov)
},
coords={
"y": [1, 2, 3, 4],
"x": [1, 2, 3, 4],
"y_ir_wv": [1, 2],
"x_ir_wv": [1, 2],
"y_tie": [1, 2],
"x_tie": [1, 2]

"x_tie": [1, 2],
},
attrs={"foo": "bar"}
)
ds["count_ir"].attrs["ancillary_variables"] = "a_ir b_ir"
ds["count_wv"].attrs["ancillary_variables"] = "a_wv b_wv"
ds["quality_pixel_bitmask"].encoding["chunksizes"] = (2, 2)
ds["time_ir_wv"].attrs["_FillValue"] = fill_val
ds["time_ir_wv"].attrs["add_offset"] = 0

return ds


@@ -379,7 +399,8 @@ def test_init(self, file_handler):
("quality_pixel_bitmask", None, 2250, quality_pixel_bitmask_exp),
("solar_zenith_angle", None, 2250, sza_vis_exp),
("solar_zenith_angle", None, 4500, sza_ir_wv_exp),
("u_independent_toa_bidirectional_reflectance", None, 4500, u_vis_refl_exp)
("u_independent_toa_bidirectional_reflectance", None, 4500, u_vis_refl_exp),
("u_structured_toa_bidirectional_reflectance", None, 4500, u_struct_refl_exp)
]
)
def test_get_dataset(self, file_handler, name, calibration, resolution,
@@ -547,17 +568,50 @@ def test_file_pattern(self, reader):
"FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_FULL_v2.6_fv3.1.nc",
"FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc",
"FIDUCEO_FCDR_L15_MVIRI_MET7-00.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc",
"MVIRI_FCDR-EASY_L15_MET7-E0000_200607060600_200607060630_0200.nc",
"MVIRI_FCDR-EASY_L15_MET7-E5700_200607060600_200607060630_0200.nc",
"MVIRI_FCDR-FULL_L15_MET7-E0000_200607060600_200607060630_0200.nc",
"abcde",
]

files = reader.select_files_from_pathnames(filenames)
# only 3 out of 4 above should match
assert len(files) == 3
assert len(files) == 6


class TestDatasetWrapper:
"""Unit tests for DatasetWrapper class."""

def test_fix_duplicate_dimensions(self):
"""Test the renaming of duplicate dimensions.

If duplicate dimensions are present in the Dataset, opening it with chunks raises a warning.
The dimensions need to be renamed.
"""
foo_time = 60*60
foo_time_exp = np.datetime64("1970-01-01 01:00").astype("datetime64[ns]")

foo = xr.Dataset(
data_vars={
"covariance_spectral_response_function_vis": (("srf_size", "srf_size"), [[1, 2], [3, 4]]),
"channel_correlation_matrix_independent": (("channel", "channel"), [[1, 2], [3, 4]]),
"channel_correlation_matrix_structured": (("channel", "channel"), [[1, 2], [3, 4]]),
"time_ir_wv": (("y_ir_wv", "x_ir_wv"), [[foo_time, foo_time], [foo_time, foo_time]],
{"_FillValue": fill_val, "add_offset": 0})
}
)
foo_ds = DatasetWrapper(foo)

foo_exp = xr.Dataset(
data_vars={
"covariance_spectral_response_function_vis": (("srf_size_1", "srf_size_2"), [[1, 2], [3, 4]]),
"channel_correlation_matrix_independent": (("channel_1", "channel_2"), [[1, 2], [3, 4]]),
"channel_correlation_matrix_structured": (("channel_1", "channel_2"), [[1, 2], [3, 4]]),
"time_ir_wv": (("y_ir_wv", "x_ir_wv"), [[foo_time_exp, foo_time_exp], [foo_time_exp, foo_time_exp]])
}
)

xr.testing.assert_allclose(foo_ds.nc, foo_exp)

def test_reassign_coords(self):
"""Test reassigning of coordinates.

@@ -588,6 +642,63 @@ def test_reassign_coords(self):
"x": [.3, .4]
}
)
ds = DatasetWrapper(nc)
foo = ds["foo"]
xr.testing.assert_equal(foo, foo_exp)
with mock.patch("satpy.readers.mviri_l1b_fiduceo_nc.DatasetWrapper._fix_duplicate_dimensions"):
with mock.patch("satpy.readers.mviri_l1b_fiduceo_nc.DatasetWrapper._decode_cf"):
ds = DatasetWrapper(nc)
foo = ds["foo"]
xr.testing.assert_equal(foo, foo_exp)

class TestInterpolator:
"""Unit tests for Interpolator class."""
@pytest.fixture(name="time_ir_wv")
def fixture_time_ir_wv(self):
"""Returns time_ir_wv."""
return xr.DataArray(
[
[np.datetime64("1970-01-01 01:00"), np.datetime64("1970-01-01 02:00")],
[np.datetime64("1970-01-01 03:00"), np.datetime64("1970-01-01 04:00")],
[np.datetime64("NaT"), np.datetime64("1970-01-01 06:00")],
[np.datetime64("NaT"), np.datetime64("NaT")],
],
dims=("y", "x"),
coords={"y": [1, 3, 5, 7]}
)

@pytest.fixture(name="acq_time_exp")
def fixture_acq_time_exp(self):
"""Returns acq_time_vis_exp."""
vis = xr.DataArray(
[
np.datetime64("1970-01-01 01:30"),
np.datetime64("1970-01-01 01:30"),
np.datetime64("1970-01-01 03:30"),
np.datetime64("1970-01-01 03:30"),
np.datetime64("1970-01-01 06:00"),
np.datetime64("1970-01-01 06:00"),
np.datetime64("NaT"),
np.datetime64("NaT")
],
dims="y",
coords={"y": [1, 2, 3, 4, 5, 6, 7, 8]}
)

ir = xr.DataArray(
[
np.datetime64("1970-01-01 01:30"),
np.datetime64("1970-01-01 03:30"),
np.datetime64("1970-01-01 06:00"),
np.datetime64("NaT"),
],
dims="y",
coords={"y": [1, 3, 5, 7]}
)

return vis, ir

def test_interp_acq_time(self, time_ir_wv, acq_time_exp):
"""Tests time interpolation."""
res_vis = Interpolator.interp_acq_time(time_ir_wv, target_y=acq_time_exp[0].coords["y"])
res_ir = Interpolator.interp_acq_time(time_ir_wv, target_y=acq_time_exp[1].coords["y"])

xr.testing.assert_allclose(res_vis, acq_time_exp[0])
xr.testing.assert_allclose(res_ir, acq_time_exp[1])
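
The expected values encode the intended behaviour: each IR/WV line is reduced to a single timestamp, which is then repeated for the finer VIS line grid, with NaT propagated. One way to reproduce them, as a sketch only (the actual Interpolator.interp_acq_time may be implemented differently):

# Illustrative sketch, not part of the diff.
import numpy as np
import xarray as xr

line_times = xr.DataArray(
    [np.datetime64("1970-01-01 01:30"), np.datetime64("1970-01-01 03:30"),
     np.datetime64("1970-01-01 06:00"), np.datetime64("NaT")],
    dims="y", coords={"y": [1, 3, 5, 7]},
)
vis_times = line_times.reindex(y=[1, 2, 3, 4, 5, 6, 7, 8], method="pad")
# vis_times matches acq_time_exp[0]: each IR/WV timestamp covers two VIS lines.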