diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0bd05f273d..6e1fdfc781 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -11,31 +11,9 @@ env: CACHE_NUMBER: 0 jobs: - lint: - name: lint and style checks - runs-on: ubuntu-latest - steps: - - name: Checkout source - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install flake8 flake8-docstrings flake8-debugger flake8-bugbear pytest - - name: Install Satpy - run: | - pip install -e . - - name: Run linting - run: | - flake8 satpy/ - test: runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.experimental }} - needs: [lint] strategy: fail-fast: true matrix: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5bf64d25da..eb21aa6601 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,12 +1,11 @@ exclude: '^$' fail_fast: false repos: - - repo: https://github.com/PyCQA/flake8 - rev: 6.1.0 + - repo: https://github.com/charliermarsh/ruff-pre-commit + # Ruff version. + rev: 'v0.0.247' hooks: - - id: flake8 - additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear, mccabe] - args: [--max-complexity, "10"] + - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.5.0 hooks: diff --git a/benchmarks/abi_l1b_benchmarks.py b/benchmarks/abi_l1b_benchmarks.py index b52cb46abd..936e0dc514 100644 --- a/benchmarks/abi_l1b_benchmarks.py +++ b/benchmarks/abi_l1b_benchmarks.py @@ -43,7 +43,7 @@ def setup_cache(self): if len(get_filenames(self.subdir)) != 16: raise RuntimeError("Existing data files do not match the expected number of files.") download_rsr() - download_luts(aerosol_type='rayleigh_only') + download_luts(aerosol_type="rayleigh_only") def setup(self): """Set up the benchmarks.""" diff --git a/benchmarks/ahi_hsd_benchmarks.py b/benchmarks/ahi_hsd_benchmarks.py index 9b78ae8ac5..361934168a 100644 --- a/benchmarks/ahi_hsd_benchmarks.py +++ b/benchmarks/ahi_hsd_benchmarks.py @@ -33,7 +33,7 @@ class HimawariHSD(GeoBenchmarks): timeout = 600 data_files: list[str] = [] subdir = os.path.join("ahi_hsd", "20210417_0500_typhoon_surigae") - reader = 'ahi_hsd' + reader = "ahi_hsd" def setup_cache(self): """Fetch the data files.""" @@ -43,7 +43,7 @@ def setup_cache(self): except ImportError: assert len(get_filenames(self.subdir)) == 4 # nosec download_rsr() - download_luts(aerosol_type='rayleigh_only') + download_luts(aerosol_type="rayleigh_only") def setup(self): """Set up the benchmarks.""" diff --git a/benchmarks/seviri_hrit_benchmarks.py b/benchmarks/seviri_hrit_benchmarks.py index 177d929adf..9851dbdac9 100644 --- a/benchmarks/seviri_hrit_benchmarks.py +++ b/benchmarks/seviri_hrit_benchmarks.py @@ -43,7 +43,7 @@ def setup_cache(self): except ImportError: assert len(get_filenames(self.subdir)) == 114 # nosec download_rsr() - download_luts(aerosol_type='rayleigh_only') + download_luts(aerosol_type="rayleigh_only") def setup(self): """Set up the benchmarks.""" diff --git a/benchmarks/utils.py b/benchmarks/utils.py index 54338d4eac..67b88025b9 100644 --- a/benchmarks/utils.py +++ b/benchmarks/utils.py @@ -64,7 +64,7 @@ def save_composite_as_geotiff(self, composite, resampler="native", area=None, filenames=None): """Save a composite to disk as geotiff.""" lscn = self.load_and_resample(composite, resampler, area, filenames) - lscn.save_dataset(composite, filename='test.tif', tiled=True) + 
lscn.save_dataset(composite, filename="test.tif", tiled=True) def compute_channel(self, channel, filenames=None): """Load and compute one channel.""" diff --git a/benchmarks/viirs_sdr_benchmarks.py b/benchmarks/viirs_sdr_benchmarks.py index 940c2d524b..68db5c6682 100644 --- a/benchmarks/viirs_sdr_benchmarks.py +++ b/benchmarks/viirs_sdr_benchmarks.py @@ -42,7 +42,7 @@ def setup_cache(self): except ImportError: assert len(self.get_filenames()) == 6 * 3 # nosec download_rsr() - download_luts(aerosol_type='rayleigh_only') + download_luts(aerosol_type="rayleigh_only") def setup(self, name): """Set up the benchmarks.""" @@ -58,14 +58,14 @@ def get_filenames(self): def load(self, composite): """Load one composite.""" from satpy import Scene - scn = Scene(filenames=self.data_files, reader='viirs_sdr') + scn = Scene(filenames=self.data_files, reader="viirs_sdr") scn.load([composite]) return scn def load_and_native_resample(self, composite): """Load and native resample a composite.""" scn = self.load(composite) - lscn = scn.resample(resampler='native') + lscn = scn.resample(resampler="native") return lscn @@ -119,4 +119,4 @@ def compute_composite(self, name): def save_composite_as_geotiff(self, name): """Save a composite to disk as geotiff.""" lscn = self.load_and_native_resample(name) - lscn.save_dataset(name, filename='test.tif', tiled=True) + lscn.save_dataset(name, filename="test.tif", tiled=True) diff --git a/doc/source/conf.py b/doc/source/conf.py index 4af8d63b4a..3bef218f89 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -23,7 +23,7 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.append(os.path.abspath('../../')) +sys.path.append(os.path.abspath("../../")) sys.path.append(os.path.abspath(os.path.dirname(__file__))) from reader_table import generate_reader_table # noqa: E402 @@ -33,10 +33,10 @@ # built documents. # # get version using setuptools-scm -release = get_distribution('satpy').version +release = get_distribution("satpy").version # The full version, including alpha/beta/rc tags. 
# for example take major/minor -version = '.'.join(release.split('.')[:2]) +version = ".".join(release.split(".")[:2]) class Mock(object): # noqa @@ -53,8 +53,8 @@ def __call__(self, *args, **kwargs): @classmethod def __getattr__(cls, name): """Mock common module attributes used in documentation.""" - if name in ('__file__', '__path__'): - return '/dev/null' + if name in ("__file__", "__path__"): + return "/dev/null" elif name[0] == name[0].upper(): mockType = type(name, (), {}) mockType.__module__ = __name__ @@ -66,15 +66,15 @@ def __getattr__(cls, name): # https://github.com/sphinx-doc/sphinx/issues/3920 -MOCK_MODULES = ['h5py'] +MOCK_MODULES = ["h5py"] for mod_name in MOCK_MODULES: sys.modules[mod_name] = Mock() # type: ignore -autodoc_mock_imports = ['cf', 'glymur', 'h5netcdf', 'imageio', 'mipp', 'netCDF4', - 'pygac', 'pygrib', 'pyhdf', 'pyninjotiff', - 'pyorbital', 'pyspectral', 'rasterio', 'trollimage', - 'zarr'] -autoclass_content = 'both' # append class __init__ docstring to the class docstring +autodoc_mock_imports = ["cf", "glymur", "h5netcdf", "imageio", "mipp", "netCDF4", + "pygac", "pygrib", "pyhdf", "pyninjotiff", + "pyorbital", "pyspectral", "rasterio", "trollimage", + "zarr"] +autoclass_content = "both" # append class __init__ docstring to the class docstring # auto generate reader table from reader config files with open("reader_table.rst", mode="w") as f: @@ -84,19 +84,19 @@ def __getattr__(cls, name): # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', - 'sphinx.ext.doctest', 'sphinx.ext.napoleon', 'sphinx.ext.autosummary', 'doi_role', - 'sphinx.ext.viewcode', 'sphinxcontrib.apidoc', - 'sphinx.ext.mathjax'] +extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx", "sphinx.ext.todo", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.autosummary", "doi_role", + "sphinx.ext.viewcode", "sphinxcontrib.apidoc", + "sphinx.ext.mathjax"] # API docs apidoc_module_dir = "../../satpy" apidoc_output_dir = "api" apidoc_excluded_paths = [ - 'readers/caliop_l2_cloud.py', - 'readers/ghrsst_l3c_sst.py', - 'readers/li_l2.py', - 'readers/scatsat1_l2b.py', + "readers/caliop_l2_cloud.py", + "readers/ghrsst_l3c_sst.py", + "readers/li_l2.py", + "readers/scatsat1_l2b.py", ] apidoc_separate_modules = True apidoc_extra_args = [ @@ -104,20 +104,20 @@ def __getattr__(cls, name): ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. # source_encoding = 'utf-8' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'Satpy' -copyright = u'2009-{}, The PyTroll Team'.format(datetime.utcnow().strftime("%Y")) +project = u"Satpy" +copyright = u"2009-{}, The PyTroll Team".format(datetime.utcnow().strftime("%Y")) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. @@ -151,7 +151,7 @@ def __getattr__(cls, name): # show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. 
# modindex_common_prefix = [] @@ -161,7 +161,7 @@ def __getattr__(cls, name): # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. -html_theme = 'sphinx_rtd_theme' +html_theme = "sphinx_rtd_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -190,16 +190,16 @@ def __getattr__(cls, name): # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] html_css_files = [ - 'theme_overrides.css', # override wide tables in RTD theme - 'https://cdn.datatables.net/1.10.23/css/jquery.dataTables.min.css', + "theme_overrides.css", # override wide tables in RTD theme + "https://cdn.datatables.net/1.10.23/css/jquery.dataTables.min.css", ] html_js_files = [ - 'https://cdn.datatables.net/1.10.23/js/jquery.dataTables.min.js', - 'main.js', + "https://cdn.datatables.net/1.10.23/js/jquery.dataTables.min.js", + "main.js", ] @@ -239,7 +239,7 @@ def __getattr__(cls, name): # html_file_suffix = '' # Output file base name for HTML help builder. -htmlhelp_basename = 'NWCSAFMSGPPdoc' +htmlhelp_basename = "NWCSAFMSGPPdoc" # -- Options for LaTeX output -------------------------------------------------- @@ -253,8 +253,8 @@ def __getattr__(cls, name): # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'satpy.tex', 'Satpy Documentation', - 'Satpy Developers', 'manual'), + ("index", "satpy.tex", "Satpy Documentation", + "Satpy Developers", "manual"), ] # The name of an image file (relative to this directory) to place at the top of @@ -277,22 +277,22 @@ def __getattr__(cls, name): # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { - 'dask': ('https://docs.dask.org/en/latest', None), - 'geoviews': ('http://geoviews.org', None), - 'jobqueue': ('https://jobqueue.dask.org/en/latest', None), - 'numpy': ('https://numpy.org/doc/stable', None), - 'pydecorate': ('https://pydecorate.readthedocs.io/en/stable', None), - 'pyorbital': ('https://pyorbital.readthedocs.io/en/stable', None), - 'pyproj': ('https://pyproj4.github.io/pyproj/dev', None), - 'pyresample': ('https://pyresample.readthedocs.io/en/stable', None), - 'pytest': ('https://docs.pytest.org/en/stable/', None), - 'python': ('https://docs.python.org/3', None), - 'scipy': ('http://scipy.github.io/devdocs', None), - 'trollimage': ('https://trollimage.readthedocs.io/en/stable', None), - 'trollsift': ('https://trollsift.readthedocs.io/en/stable', None), - 'xarray': ('https://xarray.pydata.org/en/stable', None), - 'rasterio': ('https://rasterio.readthedocs.io/en/latest', None), - 'donfig': ('https://donfig.readthedocs.io/en/latest', None), - 'pooch': ('https://www.fatiando.org/pooch/latest/', None), - 'fsspec': ('https://filesystem-spec.readthedocs.io/en/latest/', None), + "dask": ("https://docs.dask.org/en/latest", None), + "geoviews": ("http://geoviews.org", None), + "jobqueue": ("https://jobqueue.dask.org/en/latest", None), + "numpy": ("https://numpy.org/doc/stable", None), + "pydecorate": ("https://pydecorate.readthedocs.io/en/stable", None), + "pyorbital": ("https://pyorbital.readthedocs.io/en/stable", None), + "pyproj": ("https://pyproj4.github.io/pyproj/dev", None), + "pyresample": ("https://pyresample.readthedocs.io/en/stable", None), + "pytest": ("https://docs.pytest.org/en/stable/", None), + "python": ("https://docs.python.org/3", None), + "scipy": ("http://scipy.github.io/devdocs", None), + "trollimage": ("https://trollimage.readthedocs.io/en/stable", None), + "trollsift": ("https://trollsift.readthedocs.io/en/stable", None), + "xarray": ("https://xarray.pydata.org/en/stable", None), + "rasterio": ("https://rasterio.readthedocs.io/en/latest", None), + "donfig": ("https://donfig.readthedocs.io/en/latest", None), + "pooch": ("https://www.fatiando.org/pooch/latest/", None), + "fsspec": ("https://filesystem-spec.readthedocs.io/en/latest/", None), } diff --git a/doc/source/doi_role.py b/doc/source/doi_role.py index b7c64a14ac..115e7895c6 100644 --- a/doc/source/doi_role.py +++ b/doc/source/doi_role.py @@ -26,9 +26,9 @@ def doi_role(typ, rawtext, text, lineno, inliner, options=None, content=None): content = [] text = utils.unescape(text) has_explicit_title, title, part = split_explicit_title(text) - full_url = 'https://doi.org/' + part + full_url = "https://doi.org/" + part if not has_explicit_title: - title = 'DOI:' + part + title = "DOI:" + part pnode = nodes.reference(title, title, internal=False, refuri=full_url) return [pnode], [] @@ -40,20 +40,20 @@ def arxiv_role(typ, rawtext, text, lineno, inliner, options=None, content=None): content = [] text = utils.unescape(text) has_explicit_title, title, part = split_explicit_title(text) - full_url = 'https://arxiv.org/abs/' + part + full_url = "https://arxiv.org/abs/" + part if not has_explicit_title: - title = 'arXiv:' + part + title = "arXiv:" + part pnode = nodes.reference(title, title, internal=False, refuri=full_url) return [pnode], [] def setup_link_role(app): - app.add_role('doi', doi_role, override=True) - app.add_role('DOI', doi_role, override=True) - app.add_role('arXiv', arxiv_role, override=True) - app.add_role('arxiv', arxiv_role, override=True) + app.add_role("doi", doi_role, 
override=True) + app.add_role("DOI", doi_role, override=True) + app.add_role("arXiv", arxiv_role, override=True) + app.add_role("arxiv", arxiv_role, override=True) def setup(app): - app.connect('builder-inited', setup_link_role) - return {'version': '0.1', 'parallel_read_safe': True} + app.connect("builder-inited", setup_link_role) + return {"version": "0.1", "parallel_read_safe": True} diff --git a/doc/source/reader_table.py b/doc/source/reader_table.py index 1c6760a390..3ddec3444b 100644 --- a/doc/source/reader_table.py +++ b/doc/source/reader_table.py @@ -28,6 +28,7 @@ def rst_table_row(columns=None): Args: columns (list[str]): Content of each column. + Returns: str """ @@ -48,6 +49,7 @@ def rst_table_header(name=None, header=None, header_rows=1, widths="auto"): width (optional[list[int]]): Width of each column as a list. If not specified defaults to auto and will therefore determined by the backend (see ) + Returns: str """ diff --git a/pyproject.toml b/pyproject.toml index 64c68d60eb..1282120a59 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,3 +12,21 @@ skip_gitignore = true default_section = "THIRDPARTY" known_first_party = "satpy" line_length = 120 + +[tool.ruff] +# See https://docs.astral.sh/ruff/rules/ +# In the future, add "A", "B", "S", "N", "D" +select = ["E", "W", "F", "I", "PT", "TID", "C90", "Q", "T10", "T20"] +line-length = 120 + +[tool.ruff.per-file-ignores] +"satpy/tests/*" = ["S101"] # assert allowed in tests +"utils/coord2area_def.py" = ["T201"] # allow print +"fetch_avhrr_calcoeffs.py" = ["T201"] # allow print + +[tool.ruff.pydocstyle] +convention = "google" + +[tool.ruff.mccabe] +# Unlike Flake8, default to a complexity level of 10. +max-complexity = 10 diff --git a/satpy/__init__.py b/satpy/__init__.py index 5392e0d9dd..d90f65d892 100644 --- a/satpy/__init__.py +++ b/satpy/__init__.py @@ -35,4 +35,4 @@ from satpy.utils import get_logger # noqa from satpy.writers import available_writers # noqa -log = get_logger('satpy') +log = get_logger("satpy") diff --git a/satpy/_config.py b/satpy/_config.py index 7a0d7aaac3..6a14f994a8 100644 --- a/satpy/_config.py +++ b/satpy/_config.py @@ -38,21 +38,21 @@ BASE_PATH = os.path.dirname(os.path.realpath(__file__)) # FIXME: Use package_resources? -PACKAGE_CONFIG_PATH = os.path.join(BASE_PATH, 'etc') +PACKAGE_CONFIG_PATH = os.path.join(BASE_PATH, "etc") -_satpy_dirs = appdirs.AppDirs(appname='satpy', appauthor='pytroll') +_satpy_dirs = appdirs.AppDirs(appname="satpy", appauthor="pytroll") _CONFIG_DEFAULTS = { - 'tmp_dir': tempfile.gettempdir(), - 'cache_dir': _satpy_dirs.user_cache_dir, - 'cache_lonlats': False, - 'cache_sensor_angles': False, - 'config_path': [], - 'data_dir': _satpy_dirs.user_data_dir, - 'demo_data_dir': '.', - 'download_aux': True, - 'sensor_angles_position_preference': 'actual', - 'readers': { - 'clip_negative_radiances': False, + "tmp_dir": tempfile.gettempdir(), + "cache_dir": _satpy_dirs.user_cache_dir, + "cache_lonlats": False, + "cache_sensor_angles": False, + "config_path": [], + "data_dir": _satpy_dirs.user_data_dir, + "demo_data_dir": ".", + "download_aux": True, + "sensor_angles_position_preference": "actual", + "readers": { + "clip_negative_radiances": False, }, } @@ -68,17 +68,17 @@ # 5. ~/.satpy/satpy.yaml # 6. 
$SATPY_CONFIG_PATH/satpy.yaml if present (colon separated) _CONFIG_PATHS = [ - os.path.join(PACKAGE_CONFIG_PATH, 'satpy.yaml'), - os.getenv('SATPY_ROOT_CONFIG', os.path.join('/etc', 'satpy', 'satpy.yaml')), - os.path.join(sys.prefix, 'etc', 'satpy', 'satpy.yaml'), - os.path.join(_satpy_dirs.user_config_dir, 'satpy.yaml'), - os.path.join(os.path.expanduser('~'), '.satpy', 'satpy.yaml'), + os.path.join(PACKAGE_CONFIG_PATH, "satpy.yaml"), + os.getenv("SATPY_ROOT_CONFIG", os.path.join("/etc", "satpy", "satpy.yaml")), + os.path.join(sys.prefix, "etc", "satpy", "satpy.yaml"), + os.path.join(_satpy_dirs.user_config_dir, "satpy.yaml"), + os.path.join(os.path.expanduser("~"), ".satpy", "satpy.yaml"), ] # The above files can also be directories. If directories all files # with `.yaml`., `.yml`, or `.json` extensions will be used. -_ppp_config_dir = os.getenv('PPP_CONFIG_DIR', None) -_satpy_config_path = os.getenv('SATPY_CONFIG_PATH', None) +_ppp_config_dir = os.getenv("PPP_CONFIG_DIR", None) +_satpy_config_path = os.getenv("SATPY_CONFIG_PATH", None) if _ppp_config_dir is not None and _satpy_config_path is None: LOG.warning("'PPP_CONFIG_DIR' is deprecated. Please use 'SATPY_CONFIG_PATH' instead.") @@ -94,22 +94,22 @@ # i.e. last-applied/highest priority to first-applied/lowest priority _satpy_config_path_list = _satpy_config_path.split(os.pathsep) - os.environ['SATPY_CONFIG_PATH'] = repr(_satpy_config_path_list) + os.environ["SATPY_CONFIG_PATH"] = repr(_satpy_config_path_list) for config_dir in _satpy_config_path_list: - _CONFIG_PATHS.append(os.path.join(config_dir, 'satpy.yaml')) + _CONFIG_PATHS.append(os.path.join(config_dir, "satpy.yaml")) -_ancpath = os.getenv('SATPY_ANCPATH', None) -_data_dir = os.getenv('SATPY_DATA_DIR', None) +_ancpath = os.getenv("SATPY_ANCPATH", None) +_data_dir = os.getenv("SATPY_DATA_DIR", None) if _ancpath is not None and _data_dir is None: LOG.warning("'SATPY_ANCPATH' is deprecated. Please use 'SATPY_DATA_DIR' instead.") - os.environ['SATPY_DATA_DIR'] = _ancpath + os.environ["SATPY_DATA_DIR"] = _ancpath config = Config("satpy", defaults=[_CONFIG_DEFAULTS], paths=_CONFIG_PATHS) def get_config_path_safe(): """Get 'config_path' and check for proper 'list' type.""" - config_path = config.get('config_path') + config_path = config.get("config_path") if not isinstance(config_path, list): raise ValueError("Satpy config option 'config_path' must be a " "list, not '{}'".format(type(config_path))) @@ -125,7 +125,7 @@ def get_entry_points_config_dirs(group_name: str, include_config_path: bool = Tr if not dirs or dirs[-1] != new_dir: dirs.append(new_dir) if include_config_path: - dirs.extend(config.get('config_path')[::-1]) + dirs.extend(config.get("config_path")[::-1]) return dirs diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py index 25fe728b9f..4eb9826850 100644 --- a/satpy/_scene_converters.py +++ b/satpy/_scene_converters.py @@ -45,7 +45,7 @@ def to_xarray(scn, include_lonlats=True, epoch=None, include_orig_name=True, - numeric_name_prefix='CHANNEL_'): + numeric_name_prefix="CHANNEL_"): """Merge all xr.DataArray(s) of a satpy.Scene to a CF-compliant xarray object. If all Scene DataArrays are on the same area, it returns an xr.Dataset. @@ -84,7 +84,7 @@ def to_xarray(scn, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. 
- Returns + Returns: ------- ds, xr.Dataset A CF-compliant xr.Dataset diff --git a/satpy/aux_download.py b/satpy/aux_download.py index 82095737f0..5d9f0630e3 100644 --- a/satpy/aux_download.py +++ b/satpy/aux_download.py @@ -68,14 +68,14 @@ def _generate_filename(filename, component_type): return None path = filename if component_type: - path = '/'.join([component_type, path]) + path = "/".join([component_type, path]) return path def _retrieve_offline(data_dir, cache_key): - logger.debug('Downloading auxiliary files is turned off, will check ' - 'local files.') - local_file = os.path.join(data_dir, *cache_key.split('/')) + logger.debug("Downloading auxiliary files is turned off, will check " + "local files.") + local_file = os.path.join(data_dir, *cache_key.split("/")) if not os.path.isfile(local_file): raise RuntimeError("Satpy 'download_aux' setting is False meaning " "no new files will be downloaded and the local " @@ -85,7 +85,7 @@ def _retrieve_offline(data_dir, cache_key): def _should_download(cache_key): """Check if we're running tests and can download this file.""" - return not RUNNING_TESTS or 'README' in cache_key + return not RUNNING_TESTS or "README" in cache_key def retrieve(cache_key, pooch_kwargs=None): @@ -107,8 +107,8 @@ def retrieve(cache_key, pooch_kwargs=None): """ pooch_kwargs = pooch_kwargs or {} - path = satpy.config.get('data_dir') - if not satpy.config.get('download_aux'): + path = satpy.config.get("data_dir") + if not satpy.config.get("download_aux"): return _retrieve_offline(path, cache_key) if not _should_download(cache_key): raise RuntimeError("Auxiliary data download is not allowed during " @@ -123,7 +123,7 @@ def retrieve(cache_key, pooch_kwargs=None): def _retrieve_all_with_pooch(pooch_kwargs): if pooch_kwargs is None: pooch_kwargs = {} - path = satpy.config.get('data_dir') + path = satpy.config.get("data_dir") pooch_obj = pooch.create(path, path, registry=_FILE_REGISTRY, urls=_FILE_URLS) for fname in _FILE_REGISTRY: @@ -153,7 +153,7 @@ def retrieve_all(readers=None, writers=None, composite_sensors=None, ``fetch``. """ - if not satpy.config.get('download_aux'): + if not satpy.config.get("download_aux"): raise RuntimeError("Satpy 'download_aux' setting is False so no files " "will be downloaded.") @@ -305,11 +305,11 @@ def __init__(self, *args, **kwargs): """ DATA_FILE_COMPONENTS = { - 'reader': 'readers', - 'writer': 'writers', - 'composit': 'composites', - 'modifi': 'modifiers', - 'corr': 'modifiers', + "reader": "readers", + "writer": "writers", + "composit": "composites", + "modifi": "modifiers", + "corr": "modifiers", } @property @@ -318,7 +318,7 @@ def _data_file_component_type(self): for cls_name_sub, comp_type in self.DATA_FILE_COMPONENTS.items(): if cls_name_sub in cls_name: return comp_type - return 'other' + return "other" def register_data_files(self, data_files=None): """Register a series of files that may be downloaded later. 
@@ -330,8 +330,8 @@ def register_data_files(self, data_files=None): """ comp_type = self._data_file_component_type if data_files is None: - df_parent = getattr(self, 'info', self.config) - data_files = df_parent.get('data_files', []) + df_parent = getattr(self, "info", self.config) + data_files = df_parent.get("data_files", []) cache_keys = [] for data_file_entry in data_files: cache_key = self._register_data_file(data_file_entry, comp_type) @@ -340,9 +340,9 @@ def register_data_files(self, data_files=None): @staticmethod def _register_data_file(data_file_entry, comp_type): - url = data_file_entry['url'] - filename = data_file_entry.get('filename', os.path.basename(url)) - known_hash = data_file_entry.get('known_hash') + url = data_file_entry["url"] + filename = data_file_entry.get("filename", os.path.basename(url)) + known_hash = data_file_entry.get("known_hash") return register_file(url, filename, component_type=comp_type, known_hash=known_hash) @@ -351,20 +351,20 @@ def retrieve_all_cmd(argv=None): """Call 'retrieve_all' function from console script 'satpy_retrieve_all'.""" import argparse parser = argparse.ArgumentParser(description="Download auxiliary data files used by Satpy.") - parser.add_argument('--data-dir', + parser.add_argument("--data-dir", help="Override 'SATPY_DATA_DIR' for destination of " "downloaded files. This does NOT change the " "directory Satpy will look at when searching " "for files outside of this script.") - parser.add_argument('--composite-sensors', nargs="*", + parser.add_argument("--composite-sensors", nargs="*", help="Limit loaded composites for the specified " "sensors. If specified with no arguments, " "no composite files will be downloaded.") - parser.add_argument('--readers', nargs="*", + parser.add_argument("--readers", nargs="*", help="Limit searching to these readers. If specified " "with no arguments, no reader files will be " "downloaded.") - parser.add_argument('--writers', nargs="*", + parser.add_argument("--writers", nargs="*", help="Limit searching to these writers. If specified " "with no arguments, no writer files will be " "downloaded.") @@ -373,7 +373,7 @@ def retrieve_all_cmd(argv=None): logging.basicConfig(level=logging.INFO) if args.data_dir is None: - args.data_dir = satpy.config.get('data_dir') + args.data_dir = satpy.config.get("data_dir") with satpy.config.set(data_dir=args.data_dir): retrieve_all(readers=args.readers, writers=args.writers, diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index da4d1a9e5c..f6b1b13150 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -34,12 +34,12 @@ LOG = logging.getLogger(__name__) -NEGLIGIBLE_COORDS = ['time'] +NEGLIGIBLE_COORDS = ["time"] """Keywords identifying non-dimensional coordinates to be ignored during composite generation.""" -MASKING_COMPOSITOR_METHODS = ['less', 'less_equal', 'equal', 'greater_equal', - 'greater', 'not_equal', 'isnan', 'isfinite', - 'isneginf', 'isposinf'] +MASKING_COMPOSITOR_METHODS = ["less", "less_equal", "equal", "greater_equal", + "greater", "not_equal", "isnan", "isfinite", + "isneginf", "isposinf"] class IncompatibleAreas(Exception): @@ -55,8 +55,8 @@ def check_times(projectables): times = [] for proj in projectables: try: - if proj['time'].size and proj['time'][0] != 0: - times.append(proj['time'][0].values) + if proj["time"].size and proj["time"][0] != 0: + times.append(proj["time"][0].values) else: break # right? 
except KeyError: @@ -64,13 +64,13 @@ def check_times(projectables): break except IndexError: # time is a scalar - if proj['time'].values != 0: - times.append(proj['time'].values) + if proj["time"].values != 0: + times.append(proj["time"].values) else: break else: # Is there a more gracious way to handle this ? - if np.max(times) - np.min(times) > np.timedelta64(1, 's'): + if np.max(times) - np.min(times) > np.timedelta64(1, "s"): raise IncompatibleTimes mid_time = (np.max(times) - np.min(times)) / 2 + np.min(times) return mid_time @@ -79,9 +79,9 @@ def check_times(projectables): def sub_arrays(proj1, proj2): """Substract two DataArrays and combine their attrs.""" attrs = combine_metadata(proj1.attrs, proj2.attrs) - if (attrs.get('area') is None - and proj1.attrs.get('area') is not None - and proj2.attrs.get('area') is not None): + if (attrs.get("area") is None + and proj1.attrs.get("area") is not None + and proj2.attrs.get("area") is not None): raise IncompatibleAreas res = proj1 - proj2 res.attrs = attrs @@ -114,9 +114,9 @@ def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwar def id(self): """Return the DataID of the object.""" try: - return self.attrs['_satpy_id'] + return self.attrs["_satpy_id"] except KeyError: - id_keys = self.attrs.get('_satpy_id_keys', minimal_default_keys_config) + id_keys = self.attrs.get("_satpy_id_keys", minimal_default_keys_config) return DataID(id_keys, **self.attrs) def __call__(self, datasets, optional_datasets=None, **info): @@ -135,15 +135,15 @@ def __repr__(self): def apply_modifier_info(self, origin, destination): """Apply the modifier info from *origin* to *destination*.""" - o = getattr(origin, 'attrs', origin) - d = getattr(destination, 'attrs', destination) + o = getattr(origin, "attrs", origin) + d = getattr(destination, "attrs", destination) try: - dataset_keys = self.attrs['_satpy_id'].id_keys.keys() + dataset_keys = self.attrs["_satpy_id"].id_keys.keys() except KeyError: - dataset_keys = ['name', 'modifiers'] + dataset_keys = ["name", "modifiers"] for k in dataset_keys: - if k == 'modifiers' and k in self.attrs: + if k == "modifiers" and k in self.attrs: d[k] = self.attrs[k] elif d.get(k) is None: if self.attrs.get(k) is not None: @@ -225,16 +225,16 @@ def check_geolocation(self, data_arrays): if len(data_arrays) == 1: return - if 'x' in data_arrays[0].dims and \ - not all(x.sizes['x'] == data_arrays[0].sizes['x'] + if "x" in data_arrays[0].dims and \ + not all(x.sizes["x"] == data_arrays[0].sizes["x"] for x in data_arrays[1:]): raise IncompatibleAreas("X dimension has different sizes") - if 'y' in data_arrays[0].dims and \ - not all(x.sizes['y'] == data_arrays[0].sizes['y'] + if "y" in data_arrays[0].dims and \ + not all(x.sizes["y"] == data_arrays[0].sizes["y"] for x in data_arrays[1:]): raise IncompatibleAreas("Y dimension has different sizes") - areas = [ds.attrs.get('area') for ds in data_arrays] + areas = [ds.attrs.get("area") for ds in data_arrays] if all(a is None for a in areas): return if any(a is None for a in areas): @@ -242,7 +242,7 @@ def check_geolocation(self, data_arrays): if not all(areas[0] == x for x in areas[1:]): LOG.debug("Not all areas are the same in " - "'{}'".format(self.attrs['name'])) + "'{}'".format(self.attrs["name"])) raise IncompatibleAreas("Areas are different") @@ -255,7 +255,7 @@ def __call__(self, projectables, nonprojectables=None, **attrs): raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) info = 
combine_metadata(*projectables) - info['name'] = self.attrs['name'] + info["name"] = self.attrs["name"] info.update(self.attrs) # attrs from YAML/__init__ info.update(attrs) # overwriting of DataID properties @@ -273,7 +273,7 @@ def __call__(self, projectables, nonprojectables=None, **info): raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) info = combine_metadata(*projectables) - info['name'] = self.attrs['name'] + info["name"] = self.attrs["name"] proj = projectables[0] / projectables[1] proj.attrs = info @@ -289,7 +289,7 @@ def __call__(self, projectables, nonprojectables=None, **info): raise ValueError("Expected 2 datasets, got %d" % (len(projectables),)) projectables = self.match_data_arrays(projectables) info = combine_metadata(*projectables) - info['name'] = self.attrs['name'] + info["name"] = self.attrs["name"] proj = projectables[0] + projectables[1] proj.attrs = info @@ -316,10 +316,10 @@ def __call__(self, projectables, nonprojectables=None, **attrs): data = projectables[0] new_attrs = data.attrs.copy() self._update_missing_metadata(new_attrs, attrs) - resolution = new_attrs.get('resolution', None) + resolution = new_attrs.get("resolution", None) new_attrs.update(self.attrs) if resolution is not None: - new_attrs['resolution'] = resolution + new_attrs["resolution"] = resolution return xr.DataArray(data=data.data, attrs=new_attrs, dims=data.dims, coords=data.coords) @@ -349,8 +349,8 @@ def __init__(self, name, lut=None, **kwargs): def _update_attrs(self, new_attrs): """Modify name and add LUT.""" - new_attrs['name'] = self.attrs['name'] - new_attrs['composite_lut'] = list(self.lut) + new_attrs["name"] = self.attrs["name"] + new_attrs["composite_lut"] = list(self.lut) @staticmethod def _getitem(block, lut): @@ -373,7 +373,7 @@ def __call__(self, projectables, **kwargs): class GenericCompositor(CompositeBase): """Basic colored composite builder.""" - modes = {1: 'L', 2: 'LA', 3: 'RGB', 4: 'RGBA'} + modes = {1: "L", 2: "LA", 3: "RGB", 4: "RGBA"} def __init__(self, name, common_channel_mask=True, **kwargs): """Collect custom configuration values. 
@@ -389,18 +389,18 @@ def __init__(self, name, common_channel_mask=True, **kwargs): @classmethod def infer_mode(cls, data_arr): """Guess at the mode for a particular DataArray.""" - if 'mode' in data_arr.attrs: - return data_arr.attrs['mode'] - if 'bands' not in data_arr.dims: + if "mode" in data_arr.attrs: + return data_arr.attrs["mode"] + if "bands" not in data_arr.dims: return cls.modes[1] - if 'bands' in data_arr.coords and isinstance(data_arr.coords['bands'][0].item(), str): - return ''.join(data_arr.coords['bands'].values) - return cls.modes[data_arr.sizes['bands']] + if "bands" in data_arr.coords and isinstance(data_arr.coords["bands"][0].item(), str): + return "".join(data_arr.coords["bands"].values) + return cls.modes[data_arr.sizes["bands"]] def _concat_datasets(self, projectables, mode): try: - data = xr.concat(projectables, 'bands', coords='minimal') - data['bands'] = list(mode) + data = xr.concat(projectables, "bands", coords="minimal") + data["bands"] = list(mode) except ValueError as e: LOG.debug("Original exception for incompatible areas: {}".format(str(e))) raise IncompatibleAreas @@ -424,15 +424,15 @@ def _get_sensors(self, projectables): def __call__(self, projectables, nonprojectables=None, **attrs): """Build the composite.""" - if 'deprecation_warning' in self.attrs: + if "deprecation_warning" in self.attrs: warnings.warn( - self.attrs['deprecation_warning'], + self.attrs["deprecation_warning"], UserWarning, stacklevel=2 ) - self.attrs.pop('deprecation_warning', None) + self.attrs.pop("deprecation_warning", None) num = len(projectables) - mode = attrs.get('mode') + mode = attrs.get("mode") if mode is None: # num may not be in `self.modes` so only check if we need to mode = self.modes[num] @@ -440,8 +440,8 @@ def __call__(self, projectables, nonprojectables=None, **attrs): projectables = self.match_data_arrays(projectables) data = self._concat_datasets(projectables, mode) # Skip masking if user wants it or a specific alpha channel is given. 
- if self.common_channel_mask and mode[-1] != 'A': - data = data.where(data.notnull().all(dim='bands')) + if self.common_channel_mask and mode[-1] != "A": + data = data.where(data.notnull().all(dim="bands")) else: data = projectables[0] @@ -450,23 +450,23 @@ def __call__(self, projectables, nonprojectables=None, **attrs): # time coordinate value if len(projectables) > 1: time = check_times(projectables) - if time is not None and 'time' in data.dims: - data['time'] = [time] + if time is not None and "time" in data.dims: + data["time"] = [time] new_attrs = combine_metadata(*projectables) # remove metadata that shouldn't make sense in a composite new_attrs["wavelength"] = None new_attrs.pop("units", None) - new_attrs.pop('calibration', None) - new_attrs.pop('modifiers', None) + new_attrs.pop("calibration", None) + new_attrs.pop("modifiers", None) new_attrs.update({key: val for (key, val) in attrs.items() if val is not None}) - resolution = new_attrs.get('resolution', None) + resolution = new_attrs.get("resolution", None) new_attrs.update(self.attrs) if resolution is not None: - new_attrs['resolution'] = resolution + new_attrs["resolution"] = resolution new_attrs["sensor"] = self._get_sensors(projectables) new_attrs["mode"] = mode @@ -505,8 +505,8 @@ def __call__(self, projectables, nonprojectables=None, **info): filled_projectable = projectables[0] for next_projectable in projectables[1:]: filled_projectable = filled_projectable.fillna(next_projectable) - if 'optional_datasets' in info.keys(): - for next_projectable in info['optional_datasets']: + if "optional_datasets" in info.keys(): + for next_projectable in info["optional_datasets"]: filled_projectable = filled_projectable.fillna(next_projectable) return super().__call__([filled_projectable], **info) @@ -604,16 +604,16 @@ def _create_composite_from_channels(self, channels, template): mask = self._get_mask_from_data(template) channels = [self._create_masked_dataarray_like(channel, template, mask) for channel in channels] res = super(ColormapCompositor, self).__call__(channels, **template.attrs) - res.attrs['_FillValue'] = np.nan + res.attrs["_FillValue"] = np.nan return res @staticmethod def _get_mask_from_data(data): - fill_value = data.attrs.get('_FillValue', np.nan) + fill_value = data.attrs.get("_FillValue", np.nan) if np.isnan(fill_value): mask = data.notnull() else: - mask = data != data.attrs['_FillValue'] + mask = data != data.attrs["_FillValue"] return mask @staticmethod @@ -770,8 +770,8 @@ def _get_data_for_combined_product(self, day_data, night_data): # L/RGB -> RGB/RGB # LA/RGB -> RGBA/RGBA # RGB/RGBA -> RGBA/RGBA - day_data = add_bands(day_data, night_data['bands']) - night_data = add_bands(night_data, day_data['bands']) + day_data = add_bands(day_data, night_data["bands"]) + night_data = add_bands(night_data, day_data["bands"]) # Get merged metadata attrs = combine_metadata(day_data, night_data) @@ -801,7 +801,7 @@ def _weight_data(self, day_data, night_data, weights, attrs): night_band = _get_single_band_data(night_data, b) # For day-only and night-only products only the alpha channel is weighted # If there's no alpha band, weight the actual data - if b == 'A' or "only" not in self.day_night or not self.include_alpha: + if b == "A" or "only" not in self.day_night or not self.include_alpha: day_band = day_band * weights night_band = night_band * (1 - weights) band = day_band + night_band @@ -812,9 +812,9 @@ def _weight_data(self, day_data, night_data, weights, attrs): def _get_band_names(day_data, night_data): try: - bands = 
day_data['bands'] + bands = day_data["bands"] except TypeError: - bands = night_data['bands'] + bands = night_data["bands"] return bands @@ -850,18 +850,18 @@ def add_alpha_bands(data): Add an alpha band to L or RGB composite as prerequisites for the following band matching to make the masked-out area transparent. """ - if 'A' not in data['bands'].data: - new_data = [data.sel(bands=band) for band in data['bands'].data] + if "A" not in data["bands"].data: + new_data = [data.sel(bands=band) for band in data["bands"].data] # Create alpha band based on a copy of the first "real" band alpha = new_data[0].copy() - alpha.data = da.ones((data.sizes['y'], - data.sizes['x']), + alpha.data = da.ones((data.sizes["y"], + data.sizes["x"]), chunks=new_data[0].chunks) # Rename band to indicate it's alpha - alpha['bands'] = 'A' + alpha["bands"] = "A" new_data.append(alpha) - new_data = xr.concat(new_data, dim='bands') - new_data.attrs['mode'] = data.attrs['mode'] + 'A' + new_data = xr.concat(new_data, dim="bands") + new_data.attrs["mode"] = data.attrs["mode"] + "A" data = new_data return data @@ -875,17 +875,17 @@ def enhance2dataset(dset, convert_p=False): data = _get_data_from_enhanced_image(dset, convert_p) data.attrs = attrs # remove 'mode' if it is specified since it may have been updated - data.attrs.pop('mode', None) + data.attrs.pop("mode", None) # update mode since it may have changed (colorized/palettize) - data.attrs['mode'] = GenericCompositor.infer_mode(data) + data.attrs["mode"] = GenericCompositor.infer_mode(data) return data def _get_data_from_enhanced_image(dset, convert_p): img = get_enhanced_image(dset) - if convert_p and img.mode == 'P': + if convert_p and img.mode == "P": img = _apply_palette_to_image(img) - if img.mode != 'P': + if img.mode != "P": data = img.data.clip(0.0, 1.0) else: data = img.data @@ -894,9 +894,9 @@ def _get_data_from_enhanced_image(dset, convert_p): def _apply_palette_to_image(img): if len(img.palette[0]) == 3: - img = img.convert('RGB') + img = img.convert("RGB") elif len(img.palette[0]) == 4: - img = img.convert('RGBA') + img = img.convert("RGBA") return img @@ -904,36 +904,36 @@ def add_bands(data, bands): """Add bands so that they match *bands*.""" # Add R, G and B bands, remove L band bands = bands.compute() - if 'P' in data['bands'].data or 'P' in bands.data: - raise NotImplementedError('Cannot mix datasets of mode P with other datasets at the moment.') - if 'L' in data['bands'].data and 'R' in bands.data: - lum = data.sel(bands='L') + if "P" in data["bands"].data or "P" in bands.data: + raise NotImplementedError("Cannot mix datasets of mode P with other datasets at the moment.") + if "L" in data["bands"].data and "R" in bands.data: + lum = data.sel(bands="L") # Keep 'A' if it was present - if 'A' in data['bands']: - alpha = data.sel(bands='A') + if "A" in data["bands"]: + alpha = data.sel(bands="A") new_data = (lum, lum, lum, alpha) - new_bands = ['R', 'G', 'B', 'A'] - mode = 'RGBA' + new_bands = ["R", "G", "B", "A"] + mode = "RGBA" else: new_data = (lum, lum, lum) - new_bands = ['R', 'G', 'B'] - mode = 'RGB' - data = xr.concat(new_data, dim='bands', coords={'bands': new_bands}) - data['bands'] = new_bands - data.attrs['mode'] = mode + new_bands = ["R", "G", "B"] + mode = "RGB" + data = xr.concat(new_data, dim="bands", coords={"bands": new_bands}) + data["bands"] = new_bands + data.attrs["mode"] = mode # Add alpha band - if 'A' not in data['bands'].data and 'A' in bands.data: - new_data = [data.sel(bands=band) for band in data['bands'].data] + if "A" not 
in data["bands"].data and "A" in bands.data: + new_data = [data.sel(bands=band) for band in data["bands"].data] # Create alpha band based on a copy of the first "real" band alpha = new_data[0].copy() - alpha.data = da.ones((data.sizes['y'], - data.sizes['x']), + alpha.data = da.ones((data.sizes["y"], + data.sizes["x"]), chunks=new_data[0].chunks) # Rename band to indicate it's alpha - alpha['bands'] = 'A' + alpha["bands"] = "A" new_data.append(alpha) - new_data = xr.concat(new_data, dim='bands') - new_data.attrs['mode'] = data.attrs['mode'] + 'A' + new_data = xr.concat(new_data, dim="bands") + new_data.attrs["mode"] = data.attrs["mode"] + "A" data = new_data return data @@ -1061,11 +1061,11 @@ def __init__(self, *args, **kwargs): """Instanciate the ration sharpener.""" self.high_resolution_color = kwargs.pop("high_resolution_band", "red") self.neutral_resolution_color = kwargs.pop("neutral_resolution_band", None) - if self.high_resolution_color not in ['red', 'green', 'blue', None]: + if self.high_resolution_color not in ["red", "green", "blue", None]: raise ValueError("RatioSharpenedRGB.high_resolution_band must " "be one of ['red', 'green', 'blue', None]. Not " "'{}'".format(self.high_resolution_color)) - if self.neutral_resolution_color not in ['red', 'green', 'blue', None]: + if self.neutral_resolution_color not in ["red", "green", "blue", None]: raise ValueError("RatioSharpenedRGB.neutral_resolution_band must " "be one of ['red', 'green', 'blue', None]. Not " "'{}'".format(self.neutral_resolution_color)) @@ -1081,8 +1081,8 @@ def __call__(self, datasets, optional_datasets=None, **info): if not all(x.shape == datasets[0].shape for x in datasets[1:]) or \ (optional_datasets and optional_datasets[0].shape != datasets[0].shape): - raise IncompatibleAreas('RatioSharpening requires datasets of ' - 'the same size. Must resample first.') + raise IncompatibleAreas("RatioSharpening requires datasets of " + "the same size. 
Must resample first.") optional_datasets = tuple() if optional_datasets is None else optional_datasets datasets = self.match_data_arrays(datasets + optional_datasets) @@ -1100,19 +1100,19 @@ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets) if optional_datasets and self.high_resolution_color is not None: LOG.debug("Sharpening image with high resolution {} band".format(self.high_resolution_color)) high_res = datasets[3] - if 'rows_per_scan' in high_res.attrs: - new_attrs.setdefault('rows_per_scan', high_res.attrs['rows_per_scan']) - new_attrs.setdefault('resolution', high_res.attrs['resolution']) + if "rows_per_scan" in high_res.attrs: + new_attrs.setdefault("rows_per_scan", high_res.attrs["rows_per_scan"]) + new_attrs.setdefault("resolution", high_res.attrs["resolution"]) else: LOG.debug("No sharpening band specified for ratio sharpening") high_res = None - bands = {'red': low_res_red, 'green': low_res_green, 'blue': low_res_blue} + bands = {"red": low_res_red, "green": low_res_green, "blue": low_res_blue} if high_res is not None: self._sharpen_bands_with_high_res(bands, high_res) - return bands['red'], bands['green'], bands['blue'], new_attrs + return bands["red"], bands["green"], bands["blue"], new_attrs def _sharpen_bands_with_high_res(self, bands, high_res): ratio = da.map_blocks( @@ -1170,9 +1170,9 @@ def _mean4(data, offset=(0, 0), block_id=None): rows2 = rows + row_offset + row_after cols2 = cols + col_offset + col_after - av_data = np.pad(data, pad, 'edge') + av_data = np.pad(data, pad, "edge") new_shape = (int(rows2 / 2.), 2, int(cols2 / 2.), 2) - with np.errstate(invalid='ignore'): + with np.errstate(invalid="ignore"): data_mean = np.nanmean(av_data.reshape(new_shape), axis=(1, 3)) data_mean = np.repeat(np.repeat(data_mean, 2, axis=0), 2, axis=1) data_mean = data_mean[row_offset:row_offset + rows, col_offset:col_offset + cols] @@ -1199,7 +1199,7 @@ class SelfSharpenedRGB(RatioSharpenedRGB): def four_element_average_dask(d): """Average every 4 elements (2x2) in a 2D array.""" try: - offset = d.attrs['area'].crop_offset + offset = d.attrs["area"].crop_offset except (KeyError, AttributeError): offset = (0, 0) @@ -1208,16 +1208,16 @@ def four_element_average_dask(d): def __call__(self, datasets, optional_datasets=None, **attrs): """Generate the composite.""" - colors = ['red', 'green', 'blue'] + colors = ["red", "green", "blue"] if self.high_resolution_color not in colors: raise ValueError("SelfSharpenedRGB requires at least one high resolution band, not " "'{}'".format(self.high_resolution_color)) high_res = datasets[colors.index(self.high_resolution_color)] high_mean = self.four_element_average_dask(high_res) - red = high_mean if self.high_resolution_color == 'red' else datasets[0] - green = high_mean if self.high_resolution_color == 'green' else datasets[1] - blue = high_mean if self.high_resolution_color == 'blue' else datasets[2] + red = high_mean if self.high_resolution_color == "red" else datasets[0] + green = high_mean if self.high_resolution_color == "green" else datasets[1] + blue = high_mean if self.high_resolution_color == "blue" else datasets[2] return super(SelfSharpenedRGB, self).__call__((red, green, blue), optional_datasets=(high_res,), **attrs) @@ -1273,7 +1273,7 @@ def __call__(self, projectables, *args, **kwargs): # Get the enhanced version of the RGB composite to be sharpened rgb_img = enhance2dataset(projectables[1]) # Ignore alpha band when applying luminance - rgb_img = rgb_img.where(rgb_img.bands == 'A', rgb_img * luminance) + 
rgb_img = rgb_img.where(rgb_img.bands == "A", rgb_img * luminance) return super(SandwichCompositor, self).__call__(rgb_img, *args, **kwargs) @@ -1378,7 +1378,7 @@ def __init__(self, name, filename=None, url=None, known_hash=None, area=None, @staticmethod def _check_relative_filename(filename): - data_dir = satpy.config.get('data_dir') + data_dir = satpy.config.get("data_dir") path = os.path.join(data_dir, filename) return path if os.path.exists(path) else filename @@ -1406,9 +1406,9 @@ def register_data_files(self, data_files): if os.path.isabs(self._cache_filename): return [None] return super().register_data_files([{ - 'url': self._url, - 'known_hash': self._known_hash, - 'filename': self._cache_filename, + "url": self._url, + "known_hash": self._known_hash, + "filename": self._cache_filename, }]) def _retrieve_data_file(self): @@ -1421,29 +1421,29 @@ def __call__(self, *args, **kwargs): """Call the compositor.""" from satpy import Scene local_file = self._retrieve_data_file() - scn = Scene(reader='generic_image', filenames=[local_file]) - scn.load(['image']) - img = scn['image'] + scn = Scene(reader="generic_image", filenames=[local_file]) + scn.load(["image"]) + img = scn["image"] # use compositor parameters as extra metadata # most important: set 'name' of the image img.attrs.update(self.attrs) # Check for proper area definition. Non-georeferenced images # do not have `area` in the attributes - if 'area' not in img.attrs: + if "area" not in img.attrs: if self.area is None: raise AttributeError("Area definition needs to be configured") - img.attrs['area'] = self.area - img.attrs['sensor'] = None - img.attrs['mode'] = ''.join(img.bands.data) - img.attrs.pop('modifiers', None) - img.attrs.pop('calibration', None) + img.attrs["area"] = self.area + img.attrs["sensor"] = None + img.attrs["mode"] = "".join(img.bands.data) + img.attrs.pop("modifiers", None) + img.attrs.pop("calibration", None) # Add start time if not present in the filename - if 'start_time' not in img.attrs or not img.attrs['start_time']: + if "start_time" not in img.attrs or not img.attrs["start_time"]: import datetime as dt - img.attrs['start_time'] = dt.datetime.utcnow() - if 'end_time' not in img.attrs or not img.attrs['end_time']: + img.attrs["start_time"] = dt.datetime.utcnow() + if "end_time" not in img.attrs or not img.attrs["end_time"]: import datetime as dt - img.attrs['end_time'] = dt.datetime.utcnow() + img.attrs["end_time"] = dt.datetime.utcnow() return img @@ -1461,8 +1461,8 @@ def __call__(self, projectables, *args, **kwargs): # L/RGB -> RGB/RGB # LA/RGB -> RGBA/RGBA # RGB/RGBA -> RGBA/RGBA - foreground = add_bands(foreground, background['bands']) - background = add_bands(background, foreground['bands']) + foreground = add_bands(foreground, background["bands"]) + background = add_bands(background, foreground["bands"]) attrs = self._combine_metadata_with_mode_and_sensor(foreground, background) data = self._get_merged_image_data(foreground, background) @@ -1479,18 +1479,18 @@ def _combine_metadata_with_mode_and_sensor(self, # 'mode' is no longer valid after we've remove the 'A' # let the base class __call__ determine mode attrs.pop("mode", None) - if attrs.get('sensor') is None: + if attrs.get("sensor") is None: # sensor can be a set - attrs['sensor'] = self._get_sensors([foreground, background]) + attrs["sensor"] = self._get_sensors([foreground, background]) return attrs @staticmethod def _get_merged_image_data(foreground: xr.DataArray, background: xr.DataArray ) -> list[xr.DataArray]: - if 'A' in 
foreground.attrs['mode']: + if "A" in foreground.attrs["mode"]: # Use alpha channel as weight and blend the two composites - alpha = foreground.sel(bands='A') + alpha = foreground.sel(bands="A") data = [] # NOTE: there's no alpha band in the output image, it will # be added by the data writer @@ -1503,7 +1503,7 @@ def _get_merged_image_data(foreground: xr.DataArray, else: data_arr = xr.where(foreground.isnull(), background, foreground) # Split to separate bands so the mode is correct - data = [data_arr.sel(bands=b) for b in data_arr['bands']] + data = [data_arr.sel(bands=b) for b in data_arr["bands"]] return data @@ -1577,9 +1577,9 @@ def __init__(self, name, transparency=None, conditions=None, mode="LA", "MaskingCompositor, use 'conditions' instead.") self.conditions = [] for key, transp in transparency.items(): - self.conditions.append({'method': 'equal', - 'value': key, - 'transparency': transp}) + self.conditions.append({"method": "equal", + "value": key, + "transparency": transp}) LOG.info("Converted 'transparency' to 'conditions': %s", str(self.conditions)) else: @@ -1643,8 +1643,8 @@ def _select_data_bands(self, data_in): From input data, select the bands that need to have masking applied. """ - if 'bands' in data_in.dims: - return [data_in.sel(bands=b) for b in data_in['bands'] if b != 'A'] + if "bands" in data_in.dims: + return [data_in.sel(bands=b) for b in data_in["bands"] if b != "A"] if self.mode == "RGBA": return [data_in, data_in, data_in] return [data_in] @@ -1656,16 +1656,16 @@ def _get_alpha_bands(self, data, mask_in, alpha_attrs): """ # Create alpha band mask_data = mask_in.data - alpha = da.ones((data[0].sizes['y'], - data[0].sizes['x']), + alpha = da.ones((data[0].sizes["y"], + data[0].sizes["x"]), chunks=data[0].chunks) for condition in self.conditions: - method = condition['method'] - value = condition.get('value', None) + method = condition["method"] + value = condition.get("value", None) if isinstance(value, str): value = _get_flag_value(mask_in, value) - transparency = condition['transparency'] + transparency = condition["transparency"] mask = self._get_mask(method, value, mask_data) if transparency == 100.0: @@ -1684,8 +1684,8 @@ def _get_flag_value(mask, val): NWC SAF GEO/PPS softwares. 
""" - flag_meanings = mask.attrs['flag_meanings'] - flag_values = mask.attrs['flag_values'] + flag_meanings = mask.attrs["flag_meanings"] + flag_values = mask.attrs["flag_values"] if isinstance(flag_meanings, str): flag_meanings = flag_meanings.split() diff --git a/satpy/composites/cloud_products.py b/satpy/composites/cloud_products.py index a05be8ad17..4dbc2e489f 100644 --- a/satpy/composites/cloud_products.py +++ b/satpy/composites/cloud_products.py @@ -31,7 +31,7 @@ def __call__(self, projectables, **info): raise ValueError("Expected 2 datasets, got %d" % (len(projectables), )) data, status = projectables - valid = status != status.attrs['_FillValue'] + valid = status != status.attrs["_FillValue"] status_cloud_free = status % 2 == 1 # bit 0 is set cloud_free = np.logical_and(valid, status_cloud_free) if "bad_optical_conditions" in status.attrs.get("flag_meanings", "") and data.name == "cmic_cre": @@ -43,7 +43,7 @@ def __call__(self, projectables, **info): # Keep cloudfree or valid product data = data.where(np.logical_or(cloud_free, data != data.attrs["scaled_FillValue"]), np.nan) res = SingleBandCompositor.__call__(self, [data], **data.attrs) - res.attrs['_FillValue'] = np.nan + res.attrs["_FillValue"] = np.nan return res @@ -56,15 +56,15 @@ def __call__(self, projectables, **info): raise ValueError("Expected 2 datasets, got %d" % (len(projectables), )) data, cma = projectables - valid_cma = cma != cma.attrs['_FillValue'] - valid_prod = data != data.attrs['_FillValue'] + valid_cma = cma != cma.attrs["_FillValue"] + valid_prod = data != data.attrs["_FillValue"] valid_prod = np.logical_and(valid_prod, np.logical_not(np.isnan(data))) # Update valid_cma and not valid_prod means: keep not valid cma or valid prod data = data.where(np.logical_or(np.logical_not(valid_cma), valid_prod), data.attrs["scaled_FillValue"]) data = data.where(np.logical_or(valid_prod, valid_cma), np.nan) res = SingleBandCompositor.__call__(self, [data], **data.attrs) - res.attrs['_FillValue'] = np.nan + res.attrs["_FillValue"] = np.nan return res @@ -95,15 +95,15 @@ def __call__(self, projectables, *args, **kwargs): scalef1 = 1.0 / maxs1 - 1 / 255.0 p1data = (light*scalef1).where(light != 0) - p1data = p1data.where(light != light.attrs['_FillValue']) + p1data = p1data.where(light != light.attrs["_FillValue"]) p1data.attrs = light.attrs data = moderate*scalef2 p2data = data.where(moderate != 0) - p2data = p2data.where(moderate != moderate.attrs['_FillValue']) + p2data = p2data.where(moderate != moderate.attrs["_FillValue"]) p2data.attrs = moderate.attrs data = intense*scalef3 p3data = data.where(intense != 0) - p3data = p3data.where(intense != intense.attrs['_FillValue']) + p3data = p3data.where(intense != intense.attrs["_FillValue"]) p3data.attrs = intense.attrs res = super(PrecipCloudsRGB, self).__call__((p3data, p2data, p1data), diff --git a/satpy/composites/config_loader.py b/satpy/composites/config_loader.py index e5af45355b..bffbee8a13 100644 --- a/satpy/composites/config_loader.py +++ b/satpy/composites/config_loader.py @@ -38,10 +38,10 @@ def _convert_dep_info_to_data_query(dep_info): key_item = dep_info.copy() - key_item.pop('prerequisites', None) - key_item.pop('optional_prerequisites', None) - if 'modifiers' in key_item: - key_item['modifiers'] = tuple(key_item['modifiers']) + key_item.pop("prerequisites", None) + key_item.pop("optional_prerequisites", None) + if "modifiers" in key_item: + key_item["modifiers"] = tuple(key_item["modifiers"]) key = DataQuery.from_dict(key_item) return key @@ -64,14 +64,14 @@ 
def _create_comp_from_info(self, composite_info, loader): def _handle_inline_comp_dep(self, dep_info, dep_num, parent_name): # Create an unique temporary name for the composite - sub_comp_name = '_' + parent_name + '_dep_{}'.format(dep_num) - dep_info['name'] = sub_comp_name + sub_comp_name = "_" + parent_name + "_dep_{}".format(dep_num) + dep_info["name"] = sub_comp_name self._load_config_composite(dep_info) @staticmethod def _get_compositor_loader_from_config(composite_name, composite_info): try: - loader = composite_info.pop('compositor') + loader = composite_info.pop("compositor") except KeyError: raise ValueError("'compositor' key missing or empty for '{}'. Option keys = {}".format( composite_name, str(composite_info.keys()))) @@ -79,22 +79,22 @@ def _get_compositor_loader_from_config(composite_name, composite_info): def _process_composite_deps(self, composite_info): dep_num = -1 - for prereq_type in ['prerequisites', 'optional_prerequisites']: + for prereq_type in ["prerequisites", "optional_prerequisites"]: prereqs = [] for dep_info in composite_info.get(prereq_type, []): dep_num += 1 if not isinstance(dep_info, dict): prereqs.append(dep_info) continue - elif 'compositor' in dep_info: + elif "compositor" in dep_info: self._handle_inline_comp_dep( - dep_info, dep_num, composite_info['name']) + dep_info, dep_num, composite_info["name"]) prereq_key = _convert_dep_info_to_data_query(dep_info) prereqs.append(prereq_key) composite_info[prereq_type] = prereqs def _load_config_composite(self, composite_info): - composite_name = composite_info['name'] + composite_name = composite_info["name"] loader = self._get_compositor_loader_from_config(composite_name, composite_info) self._process_composite_deps(composite_info) key, comp = self._create_comp_from_info(composite_info, loader) @@ -102,7 +102,7 @@ def _load_config_composite(self, composite_info): def _load_config_composites(self, configured_composites): for composite_name, composite_info in configured_composites.items(): - composite_info['name'] = composite_name + composite_info["name"] = composite_name self._load_config_composite(composite_info) def parse_config(self, configured_composites, composite_configs): @@ -128,9 +128,9 @@ def __init__(self, loaded_modifiers, sensor_id_keys): @staticmethod def _get_modifier_loader_from_config(modifier_name, modifier_info): try: - loader = modifier_info.pop('modifier', None) + loader = modifier_info.pop("modifier", None) if loader is None: - loader = modifier_info.pop('compositor') + loader = modifier_info.pop("compositor") warnings.warn( "Modifier '{}' uses deprecated 'compositor' " "key to point to Python class, replace " @@ -143,7 +143,7 @@ def _get_modifier_loader_from_config(modifier_name, modifier_info): return loader def _process_modifier_deps(self, modifier_info): - for prereq_type in ['prerequisites', 'optional_prerequisites']: + for prereq_type in ["prerequisites", "optional_prerequisites"]: prereqs = [] for dep_info in modifier_info.get(prereq_type, []): if not isinstance(dep_info, dict): @@ -154,14 +154,14 @@ def _process_modifier_deps(self, modifier_info): modifier_info[prereq_type] = prereqs def _load_config_modifier(self, modifier_info): - modifier_name = modifier_info['name'] + modifier_name = modifier_info["name"] loader = self._get_modifier_loader_from_config(modifier_name, modifier_info) self._process_modifier_deps(modifier_info) self.loaded_modifiers[modifier_name] = (loader, modifier_info) def _load_config_modifiers(self, configured_modifiers): for modifier_name, modifier_info in 
configured_modifiers.items(): - modifier_info['name'] = modifier_name + modifier_info["name"] = modifier_name self._load_config_modifier(modifier_info) def parse_config(self, configured_modifiers, composite_configs): @@ -179,10 +179,10 @@ def _load_config(composite_configs): conf = {} for composite_config in composite_configs: - with open(composite_config, 'r', encoding='utf-8') as conf_file: + with open(composite_config, "r", encoding="utf-8") as conf_file: conf = recursive_dict_update(conf, yaml.load(conf_file, Loader=UnsafeLoader)) try: - sensor_name = conf['sensor_name'] + sensor_name = conf["sensor_name"] except KeyError: logger.debug('No "sensor_name" tag found in %s, skipping.', composite_configs) @@ -192,7 +192,7 @@ def _load_config(composite_configs): sensor_modifiers = {} dep_id_keys = None - sensor_deps = sensor_name.split('/')[:-1] + sensor_deps = sensor_name.split("/")[:-1] if sensor_deps: # get dependent for sensor_dep in sensor_deps: @@ -203,18 +203,18 @@ def _load_config(composite_configs): id_keys = _get_sensor_id_keys(conf, dep_id_keys) mod_config_helper = _ModifierConfigHelper(sensor_modifiers, id_keys) - configured_modifiers = conf.get('modifiers', {}) + configured_modifiers = conf.get("modifiers", {}) mod_config_helper.parse_config(configured_modifiers, composite_configs) comp_config_helper = _CompositeConfigHelper(sensor_compositors, id_keys) - configured_composites = conf.get('composites', {}) + configured_composites = conf.get("composites", {}) comp_config_helper.parse_config(configured_composites, composite_configs) return sensor_compositors, sensor_modifiers, id_keys def _get_sensor_id_keys(conf, parent_id_keys): try: - id_keys = conf['composite_identification_keys'] + id_keys = conf["composite_identification_keys"] except KeyError: id_keys = parent_id_keys if not id_keys: @@ -270,7 +270,7 @@ def load_compositor_configs_for_sensor(sensor_name: str) -> tuple[dict[str, dict """ config_filename = sensor_name + ".yaml" logger.debug("Looking for composites config file %s", config_filename) - paths = get_entry_points_config_dirs('satpy.composites') + paths = get_entry_points_config_dirs("satpy.composites") composite_configs = config_search_paths( os.path.join("composites", config_filename), search_dirs=paths, check_exists=True) @@ -310,12 +310,12 @@ def load_compositor_configs_for_sensors(sensor_names: Iterable[str]) -> tuple[di def all_composite_sensors(): """Get all sensor names from available composite configs.""" - paths = get_entry_points_config_dirs('satpy.composites') + paths = get_entry_points_config_dirs("satpy.composites") composite_configs = glob_config( os.path.join("composites", "*.yaml"), search_dirs=paths) yaml_names = set([os.path.splitext(os.path.basename(fn))[0] for fn in composite_configs]) - non_sensor_yamls = ('visir',) + non_sensor_yamls = ("visir",) sensor_names = [x for x in yaml_names if x not in non_sensor_yamls] return sensor_names diff --git a/satpy/composites/glm.py b/satpy/composites/glm.py index 48fe6b922c..e9b6ef275e 100644 --- a/satpy/composites/glm.py +++ b/satpy/composites/glm.py @@ -74,7 +74,7 @@ def __init__(self, name, min_highlight=0.0, max_highlight=10.0, def _get_enhanced_background_data(background_layer): img = get_enhanced_image(background_layer) img.data = img.data.clip(0.0, 1.0) - img = img.convert('RGBA') + img = img.convert("RGBA") return img.data def _get_highlight_factor(self, highlight_data): @@ -93,10 +93,10 @@ def _apply_highlight_effect(self, background_data, factor): def _update_attrs(self, new_data, 
background_layer, highlight_layer): new_data.attrs = background_layer.attrs.copy() - new_data.attrs['units'] = 1 + new_data.attrs["units"] = 1 new_sensors = self._get_sensors((highlight_layer, background_layer)) new_data.attrs.update({ - 'sensor': new_sensors, + "sensor": new_sensors, }) def __call__(self, projectables, optional_datasets=None, **attrs): @@ -107,7 +107,7 @@ def __call__(self, projectables, optional_datasets=None, **attrs): # Adjust the colors of background by highlight layer factor = self._get_highlight_factor(highlight_product) new_channels = self._apply_highlight_effect(background_data, factor) - new_data = xr.concat(new_channels, dim='bands') + new_data = xr.concat(new_channels, dim="bands") self._update_attrs(new_data, background_layer, highlight_product) return super(HighlightCompositor, self).__call__((new_data,), **attrs) diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py index 7d05a000d6..59e8518a7e 100644 --- a/satpy/composites/spectral.py +++ b/satpy/composites/spectral.py @@ -149,7 +149,7 @@ class NDVIHybridGreen(SpectralBlender): def __init__(self, *args, ndvi_min=0.0, ndvi_max=1.0, limits=(0.15, 0.05), strength=1.0, **kwargs): """Initialize class and set the NDVI limits, blending fraction limits and strength.""" if strength <= 0.0: - raise ValueError(f"Expected stength greater than 0.0, got {strength}.") + raise ValueError(f"Expected strength greater than 0.0, got {strength}.") self.ndvi_min = ndvi_min self.ndvi_max = ndvi_max diff --git a/satpy/composites/viirs.py b/satpy/composites/viirs.py index 1dd0523053..5df2d482af 100644 --- a/satpy/composites/viirs.py +++ b/satpy/composites/viirs.py @@ -90,8 +90,8 @@ def _run_dnb_normalization(self, dnb_data, sza_data): """ # convert dask arrays to DataArray objects - dnb_data = xr.DataArray(dnb_data, dims=('y', 'x')) - sza_data = xr.DataArray(sza_data, dims=('y', 'x')) + dnb_data = xr.DataArray(dnb_data, dims=("y", "x")) + sza_data = xr.DataArray(sza_data, dims=("y", "x")) good_mask = ~(dnb_data.isnull() | sza_data.isnull()) output_dataset = dnb_data.where(good_mask) @@ -904,8 +904,8 @@ def __call__(self, datasets, **info): dnb_data += 2.6e-10 dnb_data *= gtot - mda['name'] = self.attrs['name'] - mda['standard_name'] = 'ncc_radiance' + mda["name"] = self.attrs["name"] + mda["standard_name"] = "ncc_radiance" dnb_data.attrs = mda return dnb_data diff --git a/satpy/dataset/anc_vars.py b/satpy/dataset/anc_vars.py index 071a21d786..90b2d7bd3c 100644 --- a/satpy/dataset/anc_vars.py +++ b/satpy/dataset/anc_vars.py @@ -27,7 +27,7 @@ def dataset_walker(datasets): """ for dataset in datasets: yield dataset, None - for anc_ds in dataset.attrs.get('ancillary_variables', []): + for anc_ds in dataset.attrs.get("ancillary_variables", []): try: anc_ds.attrs yield anc_ds, dataset @@ -40,12 +40,12 @@ def replace_anc(dataset, parent_dataset): if parent_dataset is None: return id_keys = parent_dataset.attrs.get( - '_satpy_id_keys', + "_satpy_id_keys", dataset.attrs.get( - '_satpy_id_keys', + "_satpy_id_keys", default_id_keys_config)) current_dataid = DataID(id_keys, **dataset.attrs) - for idx, ds in enumerate(parent_dataset.attrs['ancillary_variables']): + for idx, ds in enumerate(parent_dataset.attrs["ancillary_variables"]): if current_dataid == DataID(id_keys, **ds.attrs): - parent_dataset.attrs['ancillary_variables'][idx] = dataset + parent_dataset.attrs["ancillary_variables"][idx] = dataset return diff --git a/satpy/dataset/data_dict.py b/satpy/dataset/data_dict.py index eb9d8b9662..790d688b24 100644 --- 
a/satpy/dataset/data_dict.py +++ b/satpy/dataset/data_dict.py @@ -133,9 +133,9 @@ def keys(self, names=False, wavelengths=False): # sort keys so things are a little more deterministic (.keys() is not) keys = sorted(super(DatasetDict, self).keys()) if names: - return (k.get('name') for k in keys) + return (k.get("name") for k in keys) elif wavelengths: - return (k.get('wavelength') for k in keys) + return (k.get("wavelength") for k in keys) else: return keys @@ -181,7 +181,7 @@ def get(self, key, default=None): def __setitem__(self, key, value): """Support assigning 'Dataset' objects or dictionaries of metadata.""" - if hasattr(value, 'attrs'): + if hasattr(value, "attrs"): # xarray.DataArray objects value_info = value.attrs else: @@ -198,7 +198,7 @@ def __setitem__(self, key, value): if isinstance(value_info, dict): value_info.update(new_info) if isinstance(key, DataID): - value_info['_satpy_id'] = key + value_info["_satpy_id"] = key return super(DatasetDict, self).__setitem__(key, value) @@ -215,21 +215,21 @@ def _create_dataid_key(self, key, value_info): else: new_name = value_info.get("name") # this is a new key and it's not a full DataID tuple - if new_name is None and value_info.get('wavelength') is None: + if new_name is None and value_info.get("wavelength") is None: raise ValueError("One of 'name' or 'wavelength' attrs " "values should be set.") id_keys = self._create_id_keys_from_dict(value_info) - value_info['name'] = new_name + value_info["name"] = new_name key = DataID(id_keys, **value_info) return key def _create_id_keys_from_dict(self, value_info_dict): """Create id_keys from dict.""" try: - id_keys = value_info_dict['_satpy_id'].id_keys + id_keys = value_info_dict["_satpy_id"].id_keys except KeyError: try: - id_keys = value_info_dict['_satpy_id_keys'] + id_keys = value_info_dict["_satpy_id_keys"] except KeyError: id_keys = minimal_default_keys_config return id_keys diff --git a/satpy/dataset/dataid.py b/satpy/dataset/dataid.py index 135b2af35b..d8301bc453 100644 --- a/satpy/dataset/dataid.py +++ b/satpy/dataset/dataid.py @@ -34,10 +34,10 @@ def get_keys_from_config(common_id_keys, config): for key, val in common_id_keys.items(): if key in config: id_keys[key] = val - elif val is not None and (val.get('required') is True or val.get('default') is not None): + elif val is not None and (val.get("required") is True or val.get("default") is not None): id_keys[key] = val if not id_keys: - raise ValueError('Metadata does not contain enough information to create a DataID.') + raise ValueError("Metadata does not contain enough information to create a DataID.") return id_keys @@ -57,7 +57,7 @@ def convert(cls, value): try: return cls[value] except KeyError: - raise ValueError('{} invalid value for {}'.format(value, cls)) + raise ValueError("{} invalid value for {}".format(value, cls)) @classmethod def _unpickle(cls, enum_name, enum_members, enum_member): @@ -88,10 +88,10 @@ def __hash__(self): def __repr__(self): """Represent the values.""" - return '<' + str(self) + '>' + return "<" + str(self) + ">" -wlklass = namedtuple("WavelengthRange", "min central max unit", defaults=('µm',)) # type: ignore +wlklass = namedtuple("WavelengthRange", "min central max unit", defaults=("µm",)) # type: ignore class WavelengthRange(wlklass): @@ -196,7 +196,7 @@ def _read_cf_from_string_export(cls, blob): from trollsift import Parser parser = Parser(pattern) res_dict = parser.parse(blob) - res_dict.pop('unit2') + res_dict.pop("unit2") obj = cls(**res_dict) return obj @@ -239,46 +239,46 @@ def 
__hash__(self): #: Default ID keys DataArrays. -default_id_keys_config = {'name': { - 'required': True, +default_id_keys_config = {"name": { + "required": True, }, - 'wavelength': { - 'type': WavelengthRange, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': { - 'transitive': False, + "resolution": { + "transitive": False, }, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'radiance_wavenumber', - 'counts' + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "radiance_wavenumber", + "counts" ], - 'transitive': True, + "transitive": True, }, - 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, }, } #: Default ID keys for coordinate DataArrays. -default_co_keys_config = {'name': { - 'required': True, +default_co_keys_config = {"name": { + "required": True, }, - 'resolution': { - 'transitive': True, + "resolution": { + "transitive": True, } } #: Minimal ID keys for DataArrays, for example composites. -minimal_default_keys_config = {'name': { - 'required': True, +minimal_default_keys_config = {"name": { + "required": True, }, - 'resolution': { - 'transitive': True, + "resolution": { + "transitive": True, } } @@ -313,11 +313,11 @@ def fix_id_keys(id_keys): for key, val in id_keys.items(): if not val: continue - if 'enum' in val and 'type' in val: - raise ValueError('Cannot have both type and enum for the same id key.') + if "enum" in val and "type" in val: + raise ValueError("Cannot have both type and enum for the same id key.") new_val = copy(val) - if 'enum' in val: - new_val['type'] = ValueList(key, ' '.join(new_val.pop('enum'))) + if "enum" in val: + new_val["type"] = ValueList(key, " ".join(new_val.pop("enum"))) new_id_keys[key] = new_val return new_id_keys @@ -329,12 +329,12 @@ def convert_dict(self, keyvals): for key, val in self._id_keys.items(): if val is None: val = {} - if key in keyvals or val.get('default') is not None or val.get('required'): - curated_val = keyvals.get(key, val.get('default')) - if 'required' in val and curated_val is None: - raise ValueError('Required field {} missing.'.format(key)) - if 'type' in val: - curated[key] = val['type'].convert(curated_val) + if key in keyvals or val.get("default") is not None or val.get("required"): + curated_val = keyvals.get(key, val.get("default")) + if "required" in val and curated_val is None: + raise ValueError("Required field {} missing.".format(key)) + if "type" in val: + curated[key] = val["type"].convert(curated_val) elif curated_val is not None: curated[key] = curated_val @@ -356,17 +356,17 @@ def from_dict(self, keyvals): @classmethod def from_dataarray(cls, array, default_keys=minimal_default_keys_config): """Get the DataID using the dataarray attributes.""" - if '_satpy_id' in array.attrs: - return array.attrs['_satpy_id'] + if "_satpy_id" in array.attrs: + return array.attrs["_satpy_id"] return cls.new_id_from_dataarray(array, default_keys) @classmethod def new_id_from_dataarray(cls, array, default_keys=minimal_default_keys_config): """Create a new DataID from a dataarray's attributes.""" try: - id_keys = array.attrs['_satpy_id'].id_keys + id_keys = array.attrs["_satpy_id"].id_keys except KeyError: - id_keys = array.attrs.get('_satpy_id_keys', default_keys) + id_keys = array.attrs.get("_satpy_id_keys", default_keys) return cls(id_keys, **array.attrs) @property @@ -381,7 +381,7 @@ def create_filter_query_without_required_fields(self, 
query): except AttributeError: new_query = query.copy() for key, val in self._id_keys.items(): - if val and (val.get('transitive') is not True): + if val and (val.get("transitive") is not True): new_query.pop(key, None) return DataQuery.from_dict(new_query) @@ -431,7 +431,7 @@ def __hash__(self): def _immutable(self, *args, **kws) -> NoReturn: """Raise and error.""" - raise TypeError('Cannot change a DataID') + raise TypeError("Cannot change a DataID") def __lt__(self, other): """Check lesser than.""" @@ -469,7 +469,7 @@ def _find_modifiers_key(self): def create_less_modified_query(self): """Create a query with one less modifier.""" new_dict = self.to_dict() - new_dict['modifiers'] = tuple(new_dict['modifiers'][:-1]) + new_dict["modifiers"] = tuple(new_dict["modifiers"][:-1]) return DataQuery.from_dict(new_dict) def is_modified(self): @@ -536,7 +536,7 @@ def __hash__(self): fields = [] values = [] for field, value in sorted(self._dict.items()): - if value != '*': + if value != "*": fields.append(field) if isinstance(value, (list, set)): value = tuple(value) @@ -568,7 +568,7 @@ def to_dict(self, trim=True): def _to_trimmed_dict(self): return {key: val for key, val in self._dict.items() - if val != '*'} + if val != "*"} def __repr__(self): """Represent the query.""" @@ -595,7 +595,7 @@ def _shares_required_keys(self, dataid): """Check if dataid shares required keys with the current query.""" for key, val in dataid._id_keys.items(): try: - if val.get('required', False): + if val.get("required", False): if key in self._fields: return True except AttributeError: @@ -604,7 +604,7 @@ def _shares_required_keys(self, dataid): def _match_query_value(self, key, id_val): val = self._dict[key] - if val == '*': + if val == "*": return True if isinstance(id_val, tuple) and isinstance(val, (tuple, list)): return tuple(val) == id_val @@ -664,8 +664,8 @@ def sort_dataids(self, dataids): for key in keys: if distance == np.inf: break - val = self._dict.get(key, '*') - if val == '*': + val = self._dict.get(key, "*") + if val == "*": distance = self._add_absolute_distance(dataid, key, distance) else: try: @@ -711,12 +711,12 @@ def _add_distance_from_query(dataid_val, requested_val, distance): def create_less_modified_query(self): """Create a query with one less modifier.""" new_dict = self.to_dict() - new_dict['modifiers'] = tuple(new_dict['modifiers'][:-1]) + new_dict["modifiers"] = tuple(new_dict["modifiers"][:-1]) return DataQuery.from_dict(new_dict) def is_modified(self): """Check if this is modified.""" - return bool(self._dict.get('modifiers')) + return bool(self._dict.get("modifiers")) def create_filtered_query(dataset_key, filter_query): @@ -735,7 +735,7 @@ def create_filtered_query(dataset_key, filter_query): def _update_dict_with_filter_query(ds_dict, filter_query): if filter_query is not None: for key, value in filter_query.items(): - if value != '*': + if value != "*": ds_dict.setdefault(key, value) @@ -744,9 +744,9 @@ def _create_id_dict_from_any_key(dataset_key): ds_dict = dataset_key.to_dict() except AttributeError: if isinstance(dataset_key, str): - ds_dict = {'name': dataset_key} + ds_dict = {"name": dataset_key} elif isinstance(dataset_key, numbers.Number): - ds_dict = {'wavelength': dataset_key} + ds_dict = {"wavelength": dataset_key} else: raise TypeError("Don't know how to interpret a dataset_key of type {}".format(type(dataset_key))) return ds_dict diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py index 4ba3cde1a1..46f6f622b8 100644 --- a/satpy/dataset/metadata.py +++ 
b/satpy/dataset/metadata.py @@ -79,7 +79,7 @@ def _combine_shared_info(shared_keys, info_dicts, average_times): shared_info = {} for key in shared_keys: values = [info[key] for info in info_dicts] - if 'time' in key and isinstance(values[0], datetime) and average_times: + if "time" in key and isinstance(values[0], datetime) and average_times: shared_info[key] = average_datetimes(values) elif _are_values_combinable(values): shared_info[key] = values[0] @@ -146,7 +146,7 @@ def _all_arrays_equal(arrays): If the arrays are lazy, just check if they have the same identity. """ - if hasattr(arrays[0], 'compute'): + if hasattr(arrays[0], "compute"): return _all_identical(arrays) return _all_values_equal(arrays) diff --git a/satpy/demo/__init__.py b/satpy/demo/__init__.py index b12c9e285b..e6ad87eb88 100644 --- a/satpy/demo/__init__.py +++ b/satpy/demo/__init__.py @@ -50,7 +50,7 @@ """ -from .abi_l1b import get_hurricane_florence_abi # noqa: F401 +from .abi_l1b import get_hurricane_florence_abi # noqa: F401, I001 from .abi_l1b import get_us_midlatitude_cyclone_abi # noqa: F401 from .ahi_hsd import download_typhoon_surigae_ahi # noqa: F401 from .fci import download_fci_test_data # noqa: F401 diff --git a/satpy/demo/_google_cloud_platform.py b/satpy/demo/_google_cloud_platform.py index c1b7016388..970fd9cfa3 100644 --- a/satpy/demo/_google_cloud_platform.py +++ b/satpy/demo/_google_cloud_platform.py @@ -32,7 +32,7 @@ def is_google_cloud_instance(): """Check if we are on a GCP virtual machine.""" try: - return urlopen('http://metadata.google.internal').headers.get('Metadata-Flavor') == 'Google' + return urlopen("http://metadata.google.internal").headers.get("Metadata-Flavor") == "Google" # nosec except URLError: return False @@ -68,7 +68,7 @@ def get_bucket_files(glob_pattern, base_dir, force=False, pattern_slice=None): if isinstance(glob_pattern, str): glob_pattern = [glob_pattern] - fs = gcsfs.GCSFileSystem(token='anon') + fs = gcsfs.GCSFileSystem(token="anon") # nosec filenames = [] for gp in glob_pattern: # handle multiple glob patterns being treated as one pattern @@ -98,5 +98,5 @@ def _download_gcs_files(globbed_files, fs, base_dir, force): LOG.info("Found existing: {}".format(ondisk_pathname)) continue LOG.info("Downloading: {}".format(ondisk_pathname)) - fs.get('gs://' + fn, ondisk_pathname) + fs.get("gs://" + fn, ondisk_pathname) return filenames diff --git a/satpy/demo/abi_l1b.py b/satpy/demo/abi_l1b.py index e223238767..8583c2580b 100644 --- a/satpy/demo/abi_l1b.py +++ b/satpy/demo/abi_l1b.py @@ -36,19 +36,20 @@ def get_us_midlatitude_cyclone_abi(base_dir=None, method=None, force=False): Total size: ~110MB """ - base_dir = base_dir or config.get('demo_data_dir', '.') + base_dir = base_dir or config.get("demo_data_dir", ".") if method is None: - method = 'gcsfs' - if method not in ['gcsfs']: + method = "gcsfs" + if method not in ["gcsfs"]: raise NotImplementedError("Demo data download method '{}' not " "implemented yet.".format(method)) from ._google_cloud_platform import get_bucket_files - patterns = ['gs://gcp-public-data-goes-16/ABI-L1b-RadC/2019/073/00/*s20190730002*.nc'] - subdir = os.path.join(base_dir, 'abi_l1b', '20190314_us_midlatitude_cyclone') + patterns = ["gs://gcp-public-data-goes-16/ABI-L1b-RadC/2019/073/00/*s20190730002*.nc"] + subdir = os.path.join(base_dir, "abi_l1b", "20190314_us_midlatitude_cyclone") os.makedirs(subdir, exist_ok=True) filenames = get_bucket_files(patterns, subdir, force=force) - assert len(filenames) == 16, "Not all files could be downloaded" + if 
len(filenames) != 16: + raise RuntimeError("Not all files could be downloaded") return filenames @@ -76,12 +77,12 @@ def get_hurricane_florence_abi(base_dir=None, method=None, force=False, Total size (240 frames, all channels): ~3.5GB """ - base_dir = base_dir or config.get('demo_data_dir', '.') + base_dir = base_dir or config.get("demo_data_dir", ".") if channels is None: channels = range(1, 17) if method is None: - method = 'gcsfs' - if method not in ['gcsfs']: + method = "gcsfs" + if method not in ["gcsfs"]: raise NotImplementedError("Demo data download method '{}' not " "implemented yet.".format(method)) if isinstance(num_frames, (int, float)): @@ -96,16 +97,17 @@ def get_hurricane_florence_abi(base_dir=None, method=None, force=False, # patterns += ['gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/1[3456]/' # '*C{:02d}*s20182541[3456]*.nc'.format(channel)] patterns += [( - 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/13/*RadM1*C{:02d}*s201825413*.nc'.format(channel), - 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/14/*RadM1*C{:02d}*s201825414*.nc'.format(channel), - 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/15/*RadM1*C{:02d}*s201825415*.nc'.format(channel), - 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/16/*RadM1*C{:02d}*s201825416*.nc'.format(channel), + "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/13/*RadM1*C{:02d}*s201825413*.nc".format(channel), + "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/14/*RadM1*C{:02d}*s201825414*.nc".format(channel), + "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/15/*RadM1*C{:02d}*s201825415*.nc".format(channel), + "gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/16/*RadM1*C{:02d}*s201825416*.nc".format(channel), )] - subdir = os.path.join(base_dir, 'abi_l1b', '20180911_hurricane_florence_abi_l1b') + subdir = os.path.join(base_dir, "abi_l1b", "20180911_hurricane_florence_abi_l1b") os.makedirs(subdir, exist_ok=True) filenames = get_bucket_files(patterns, subdir, force=force, pattern_slice=frame_slice) actual_slice = frame_slice.indices(240) # 240 max frames num_frames = int((actual_slice[1] - actual_slice[0]) / actual_slice[2]) - assert len(filenames) == len(channels) * num_frames, "Not all files could be downloaded" + if len(filenames) != len(channels) * num_frames: + raise RuntimeError("Not all files could be downloaded") return filenames diff --git a/satpy/demo/ahi_hsd.py b/satpy/demo/ahi_hsd.py index 784d90719f..5731b23f01 100644 --- a/satpy/demo/ahi_hsd.py +++ b/satpy/demo/ahi_hsd.py @@ -29,7 +29,7 @@ def download_typhoon_surigae_ahi(base_dir=None, This scene shows the Typhoon Surigae. 
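A rough usage sketch for the demo helpers above, assuming gcsfs is installed and network access is available; the composite name is only an example:

    from satpy import Scene
    from satpy.demo import get_us_midlatitude_cyclone_abi

    # Downloads ~110 MB of ABI L1b granules into 'demo_data_dir' (or ".") and
    # returns the local paths; an incomplete download now raises RuntimeError.
    filenames = get_us_midlatitude_cyclone_abi()
    scn = Scene(filenames=filenames, reader="abi_l1b")
    scn.load(["true_color"])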
""" import s3fs - base_dir = base_dir or config.get('demo_data_dir', '.') + base_dir = base_dir or config.get("demo_data_dir", ".") channel_resolution = {1: 10, 2: 10, 3: 5, @@ -40,7 +40,7 @@ def download_typhoon_surigae_ahi(base_dir=None, for segment in segments: data_files.append(f"HS_H08_20210417_0500_B{channel:02d}_FLDK_R{resolution:02d}_S{segment:02d}10.DAT.bz2") - subdir = os.path.join(base_dir, 'ahi_hsd', '20210417_0500_typhoon_surigae') + subdir = os.path.join(base_dir, "ahi_hsd", "20210417_0500_typhoon_surigae") os.makedirs(subdir, exist_ok=True) fs = s3fs.S3FileSystem(anon=True) @@ -50,7 +50,7 @@ def download_typhoon_surigae_ahi(base_dir=None, result.append(destination_filename) if os.path.exists(destination_filename): continue - to_get = 'noaa-himawari8/AHI-L1b-FLDK/2021/04/17/0500/' + filename + to_get = "noaa-himawari8/AHI-L1b-FLDK/2021/04/17/0500/" + filename fs.get_file(to_get, destination_filename) return result diff --git a/satpy/demo/fci.py b/satpy/demo/fci.py index 7c4160b203..7a2abe5d20 100644 --- a/satpy/demo/fci.py +++ b/satpy/demo/fci.py @@ -53,5 +53,5 @@ def _unpack_tarfile_to(filename, subdir): """Unpack content of tarfile in filename to subdir.""" with tarfile.open(filename, mode="r:gz") as tf: contents = tf.getnames() - tf.extractall(path=subdir) + tf.extractall(path=subdir) # nosec return contents diff --git a/satpy/demo/utils.py b/satpy/demo/utils.py index 0fd1d1d1a7..63ccbd007f 100644 --- a/satpy/demo/utils.py +++ b/satpy/demo/utils.py @@ -22,7 +22,7 @@ def download_url(source, target): """Download a url in stream mode.""" - with requests.get(source, stream=True) as r: + with requests.get(source, stream=True, timeout=10) as r: r.raise_for_status() with open(target, "wb") as f: for chunk in r.iter_content(chunk_size=8192): diff --git a/satpy/dependency_tree.py b/satpy/dependency_tree.py index 331483cabc..d99fb536eb 100644 --- a/satpy/dependency_tree.py +++ b/satpy/dependency_tree.py @@ -431,7 +431,7 @@ def _find_compositor(self, dataset_key, query): compositor = self.get_modifier(dataset_key) except KeyError: raise KeyError("Can't find anything called {}".format(str(dataset_key))) - compositor.attrs['prerequisites'] = [implicit_dependency_node] + list(compositor.attrs['prerequisites']) + compositor.attrs["prerequisites"] = [implicit_dependency_node] + list(compositor.attrs["prerequisites"]) else: try: compositor = self.get_compositor(dataset_key) @@ -446,14 +446,14 @@ def _find_compositor(self, dataset_key, query): # Get the prerequisites LOG.trace("Looking for composite prerequisites for: {}".format(dataset_key)) prereqs = [create_filtered_query(prereq, prerequisite_filter) if not isinstance(prereq, Node) else prereq - for prereq in compositor.attrs['prerequisites']] + for prereq in compositor.attrs["prerequisites"]] prereqs = self._create_required_subtrees(root, prereqs, query=query) root.add_required_nodes(prereqs) # Get the optionals LOG.trace("Looking for optional prerequisites for: {}".format(dataset_key)) optionals = [create_filtered_query(prereq, prerequisite_filter) if not isinstance(prereq, Node) else prereq - for prereq in compositor.attrs['optional_prerequisites']] + for prereq in compositor.attrs["optional_prerequisites"]] optionals = self._create_optional_subtrees(root, optionals, query=query) root.add_optional_nodes(optionals) @@ -501,7 +501,7 @@ def get_compositor(self, key): def get_modifier(self, comp_id): """Get a modifer.""" # create a DataID for the compositor we are generating - modifier = comp_id['modifiers'][-1] + modifier = 
comp_id["modifiers"][-1] for sensor_name in sorted(self.modifiers): modifiers = self.modifiers[sensor_name] compositors = self.compositors[sensor_name] @@ -511,7 +511,7 @@ def get_modifier(self, comp_id): mloader, moptions = modifiers[modifier] moptions = moptions.copy() moptions.update(comp_id.to_dict()) - moptions['sensor'] = sensor_name + moptions["sensor"] = sensor_name compositors[comp_id] = mloader(_satpy_id=comp_id, **moptions) return compositors[comp_id] @@ -544,7 +544,7 @@ def _create_optional_subtrees(self, parent, prereqs, query=None): for prereq, unknowns in unknown_datasets.items(): u_str = ", ".join([str(x) for x in unknowns]) - LOG.debug('Skipping optional %s: Unknown dataset %s', + LOG.debug("Skipping optional %s: Unknown dataset %s", str(prereq), u_str) return prereq_nodes diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index 6f6a66654d..e2dda9cf63 100644 --- a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py @@ -31,8 +31,7 @@ from satpy._compat import ArrayLike from satpy._config import get_config_path - -from ..utils import find_in_ancillary +from satpy.utils import find_in_ancillary LOG = logging.getLogger(__name__) @@ -57,8 +56,8 @@ def exclude_alpha(func): @wraps(func) def wrapper(data, **kwargs): - bands = data.coords['bands'].values - exclude = ['A'] if 'A' in bands else [] + bands = data.coords["bands"].values + exclude = ["A"] if "A" in bands else [] band_data = data.sel(bands=[b for b in bands if b not in exclude]) band_data = func(band_data, **kwargs) @@ -67,7 +66,7 @@ def wrapper(data, **kwargs): attrs.update(band_data.attrs) # combine the new data with the excluded data new_data = xr.concat([band_data, data.sel(bands=exclude)], - dim='bands') + dim="bands") data.data = new_data.sel(bands=bands).data data.attrs = attrs return data @@ -92,12 +91,12 @@ def my_enhancement_function(data): def wrapper(data, **kwargs): attrs = data.attrs data_arrs = [] - for idx, band in enumerate(data.coords['bands'].values): + for idx, band in enumerate(data.coords["bands"].values): band_data = func(data.sel(bands=[band]), index=idx, **kwargs) data_arrs.append(band_data) # we assume that the func can add attrs attrs.update(band_data.attrs) - data.data = xr.concat(data_arrs, dim='bands').data + data.data = xr.concat(data_arrs, dim="bands").data data.attrs = attrs return data @@ -248,9 +247,9 @@ def reinhard_to_srgb(img, saturation=1.25, white=100, **kwargs): white /= 100 # extract color components - r = rgb.sel(bands='R').data - g = rgb.sel(bands='G').data - b = rgb.sel(bands='B').data + r = rgb.sel(bands="R").data + g = rgb.sel(bands="G").data + b = rgb.sel(bands="B").data # saturate luma = _compute_luminance_from_rgb(r, g, b) @@ -280,7 +279,7 @@ def _srgb_gamma(arr): def lookup(img, **kwargs): """Assign values to channels based on a table.""" - luts = np.array(kwargs['luts'], dtype=np.float32) / 255.0 + luts = np.array(kwargs["luts"], dtype=np.float32) / 255.0 return _lookup_table(img.data, luts=luts) @@ -352,7 +351,7 @@ def _merge_colormaps(kwargs, img=None): from trollimage.colormap import Colormap full_cmap = None - palette = kwargs['palettes'] + palette = kwargs["palettes"] if isinstance(palette, Colormap): full_cmap = palette else: @@ -457,11 +456,11 @@ def create_colormap(palette, img=None): information. 
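A minimal sketch of the palette dictionary that create_colormap consumes, using only keys handled in the code below; the colour stops and value range are made up:

    from satpy.enhancements import create_colormap

    palette = {
        "colors": [[0, 0, 0], [255, 128, 0], [255, 255, 255]],  # interpreted on a 0-255 scale
        "color_scale": 255,
        "min_value": 200.0,   # 'min_value' and 'max_value' must be given together
        "max_value": 300.0,
    }
    cmap = create_colormap(palette)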
""" - fname = palette.get('filename', None) - colors = palette.get('colors', None) + fname = palette.get("filename", None) + colors = palette.get("colors", None) dataset = palette.get("dataset", None) # are colors between 0-255 or 0-1 - color_scale = palette.get('color_scale', 255) + color_scale = palette.get("color_scale", 255) if fname: if not os.path.exists(fname): fname = get_config_path(fname) @@ -477,9 +476,9 @@ def create_colormap(palette, img=None): if palette.get("reverse", False): cmap.reverse() - if 'min_value' in palette and 'max_value' in palette: + if "min_value" in palette and "max_value" in palette: cmap.set_range(palette["min_value"], palette["max_value"]) - elif 'min_value' in palette or 'max_value' in palette: + elif "min_value" in palette or "max_value" in palette: raise ValueError("Both 'min_value' and 'max_value' must be specified (or neither)") return cmap @@ -498,12 +497,12 @@ def _create_colormap_from_dataset(img, dataset, color_scale): def three_d_effect(img, **kwargs): """Create 3D effect using convolution.""" - w = kwargs.get('weight', 1) + w = kwargs.get("weight", 1) LOG.debug("Applying 3D effect with weight %.2f", w) kernel = np.array([[-w, 0, w], [-w, 1, w], [-w, 0, w]]) - mode = kwargs.get('convolve_mode', 'same') + mode = kwargs.get("convolve_mode", "same") return _three_d_effect(img.data, kernel=kernel, mode=mode) @@ -582,7 +581,7 @@ def jma_true_color_reproduction(img): https://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html """ _jma_true_color_reproduction(img.data, - platform=img.data.attrs['platform_name']) + platform=img.data.attrs["platform_name"]) @exclude_alpha @@ -597,29 +596,29 @@ def _jma_true_color_reproduction(img_data, platform=None): """ # Conversion matrix dictionaries specifying sensor and platform. 
- ccm_dict = {'himawari-8': np.array([[1.1629, 0.1539, -0.2175], + ccm_dict = {"himawari-8": np.array([[1.1629, 0.1539, -0.2175], [-0.0252, 0.8725, 0.1300], [-0.0204, -0.1100, 1.0633]]), - 'himawari-9': np.array([[1.1619, 0.1542, -0.2168], + "himawari-9": np.array([[1.1619, 0.1542, -0.2168], [-0.0271, 0.8749, 0.1295], [-0.0202, -0.1103, 1.0634]]), - 'goes-16': np.array([[1.1425, 0.1819, -0.2250], + "goes-16": np.array([[1.1425, 0.1819, -0.2250], [-0.0951, 0.9363, 0.1360], [-0.0113, -0.1179, 1.0621]]), - 'goes-17': np.array([[1.1437, 0.1818, -0.2262], + "goes-17": np.array([[1.1437, 0.1818, -0.2262], [-0.0952, 0.9354, 0.1371], [-0.0113, -0.1178, 1.0620]]), - 'goes-18': np.array([[1.1629, 0.1539, -0.2175], + "goes-18": np.array([[1.1629, 0.1539, -0.2175], [-0.0252, 0.8725, 0.1300], [-0.0204, -0.1100, 1.0633]]), - 'mtg-i1': np.array([[0.9007, 0.2086, -0.0100], + "mtg-i1": np.array([[0.9007, 0.2086, -0.0100], [-0.0475, 1.0662, -0.0414], [-0.0123, -0.1342, 1.0794]]), - 'geo-kompsat-2a': np.array([[1.1661, 0.1489, -0.2157], + "geo-kompsat-2a": np.array([[1.1661, 0.1489, -0.2157], [-0.0255, 0.8745, 0.1282], [-0.0205, -0.1103, 1.0637]]), } diff --git a/satpy/enhancements/mimic.py b/satpy/enhancements/mimic.py index c2b028f6e8..3a72d5b247 100644 --- a/satpy/enhancements/mimic.py +++ b/satpy/enhancements/mimic.py @@ -296,11 +296,11 @@ def nrl_colors(img, **kwargs): [74.72527472527473, [218, 131, 185]], [75.0, [220, 135, 189]], ]} - kwargs['palettes'].update(nrl_tpw_colors) - palette = kwargs['palettes'] - palette['colors'] = tuple(map(tuple, palette['colors'])) + kwargs["palettes"].update(nrl_tpw_colors) + palette = kwargs["palettes"] + palette["colors"] = tuple(map(tuple, palette["colors"])) - cm = Colormap(*palette['colors']) + cm = Colormap(*palette["colors"]) img.palettize(cm) @@ -310,8 +310,8 @@ def total_precipitable_water(img, **kwargs): This modifies the image's data so the correct colors can be applied to it, and then palettizes the image. """ - palette = kwargs['palettes'] - palette['colors'] = tuple(map(tuple, palette['colors'])) + palette = kwargs["palettes"] + palette["colors"] = tuple(map(tuple, palette["colors"])) - cm = Colormap(*palette['colors']) + cm = Colormap(*palette["colors"]) img.palettize(cm) diff --git a/satpy/enhancements/viirs.py b/satpy/enhancements/viirs.py index 627fc80220..3ed5e2dd5f 100644 --- a/satpy/enhancements/viirs.py +++ b/satpy/enhancements/viirs.py @@ -27,11 +27,11 @@ def water_detection(img, **kwargs): This modifies the image's data so the correct colors can be applied to it, and then palettizes the image. 
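These palette-based enhancement hooks all follow the same pattern; a stripped-down sketch, with invented colour stops in the 0-1 range expected by trollimage:

    from trollimage.colormap import Colormap

    palette = {"colors": ((0.0, (0.0, 0.0, 0.0)), (1.0, (0.86, 0.53, 0.73)))}
    cm = Colormap(*palette["colors"])  # each entry is a (value, (R, G, B)) pair
    # img.palettize(cm)  # 'img' is the XRImage handed to the enhancement function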
""" - palette = kwargs['palettes'] - palette['colors'] = tuple(map(tuple, palette['colors'])) + palette = kwargs["palettes"] + palette["colors"] = tuple(map(tuple, palette["colors"])) _water_detection(img.data) - cm = Colormap(*palette['colors']) + cm = Colormap(*palette["colors"]) img.palettize(cm) diff --git a/satpy/modifiers/__init__.py b/satpy/modifiers/__init__.py index c8d32f246e..77d6fe728a 100644 --- a/satpy/modifiers/__init__.py +++ b/satpy/modifiers/__init__.py @@ -20,7 +20,7 @@ # file deepcode ignore W0611: Ignore unused imports in init module from .base import ModifierBase # noqa: F401, isort: skip -from .atmosphere import CO2Corrector # noqa: F401 +from .atmosphere import CO2Corrector # noqa: F401, I001 from .atmosphere import PSPAtmosphericalCorrection # noqa: F401 from .atmosphere import PSPRayleighReflectance # noqa: F401 from .geometry import EffectiveSolarPathLengthCorrector # noqa: F401 diff --git a/satpy/modifiers/_crefl.py b/satpy/modifiers/_crefl.py index 3d34ab9d93..bc42228f26 100644 --- a/satpy/modifiers/_crefl.py +++ b/satpy/modifiers/_crefl.py @@ -70,7 +70,7 @@ def _get_registered_dem_cache_key(self): if not self.url: return reg_files = self.register_data_files([{ - 'url': self.url, 'known_hash': self.known_hash} + "url": self.url, "known_hash": self.known_hash} ]) return reg_files[0] diff --git a/satpy/modifiers/_crefl_utils.py b/satpy/modifiers/_crefl_utils.py index c8d6920056..e83d43b5e2 100644 --- a/satpy/modifiers/_crefl_utils.py +++ b/satpy/modifiers/_crefl_utils.py @@ -282,7 +282,7 @@ def run_crefl(refl, :param avg_elevation: average elevation (usually pre-calculated and stored in CMGDEM.hdf) """ - runner_cls = _runner_class_for_sensor(refl.attrs['sensor']) + runner_cls = _runner_class_for_sensor(refl.attrs["sensor"]) runner = runner_cls(refl) corr_refl = runner(sensor_azimuth, sensor_zenith, solar_azimuth, solar_zenith, avg_elevation) return corr_refl @@ -326,7 +326,7 @@ def _height_from_avg_elevation(self, avg_elevation: Optional[np.ndarray]) -> da. height = 0. else: LOG.debug("Using average elevation information provided to CREFL") - lon, lat = self._refl.attrs['area'].get_lonlats(chunks=self._refl.chunks) + lon, lat = self._refl.attrs["area"].get_lonlats(chunks=self._refl.chunks) height = da.map_blocks(_space_mask_height, lon, lat, avg_elevation, chunks=lon.chunks, dtype=avg_elevation.dtype) return height diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index c888447d76..1d059e1f5a 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -259,7 +259,7 @@ def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): arg = arg.isoformat(" ") hashable_args.append(arg) arg_hash = hashlib.sha1() # nosec - arg_hash.update(json.dumps(tuple(hashable_args)).encode('utf8')) + arg_hash.update(json.dumps(tuple(hashable_args)).encode("utf8")) return arg_hash.hexdigest() @@ -322,7 +322,7 @@ def _chunks_are_irregular(chunks_tuple: tuple) -> bool: def _geo_dask_to_data_array(arr: da.Array) -> xr.DataArray: - return xr.DataArray(arr, dims=('y', 'x')) + return xr.DataArray(arr, dims=("y", "x")) def compute_relative_azimuth(sat_azi: xr.DataArray, sun_azi: xr.DataArray) -> xr.DataArray: @@ -331,6 +331,7 @@ def compute_relative_azimuth(sat_azi: xr.DataArray, sun_azi: xr.DataArray) -> xr Args: sat_azi: DataArray for the satellite azimuth angles, typically in 0-360 degree range. sun_azi: DataArray for the solar azimuth angles, should be in same range as sat_azi. 
+ Returns: A DataArray containing the relative azimuth angle in the 0-180 degree range. @@ -450,7 +451,7 @@ def _get_sun_azimuth_ndarray(lons: np.ndarray, lats: np.ndarray, start_time: dat def _get_sensor_angles(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray]: - preference = satpy.config.get('sensor_angles_position_preference', 'actual') + preference = satpy.config.get("sensor_angles_position_preference", "actual") sat_lon, sat_lat, sat_alt = get_satpos(data_arr, preference=preference) area_def = data_arr.attrs["area"] chunks = _geo_chunks_from_data_arr(data_arr) @@ -534,7 +535,7 @@ def _sunzen_corr_cos_ndarray(data: np.ndarray, # gradually fall off for larger zenith angle grad_factor = (np.arccos(cos_zen) - limit_rad) / (max_sza_rad - limit_rad) # invert the factor so maximum correction is done at `limit` and falls off later - with np.errstate(invalid='ignore'): # we expect space pixels to be invalid + with np.errstate(invalid="ignore"): # we expect space pixels to be invalid grad_factor = 1. - np.log(grad_factor + 1) / np.log(2) # make sure we don't make anything negative grad_factor = grad_factor.clip(0.) @@ -571,7 +572,7 @@ def _sunzen_reduction_ndarray(data: np.ndarray, reduction_factor = reduction_factor.clip(0., 1.) # invert the reduction factor such that minimum reduction is done at `limit` and gradually increases towards max_sza - with np.errstate(invalid='ignore'): # we expect space pixels to be invalid + with np.errstate(invalid="ignore"): # we expect space pixels to be invalid reduction_factor = 1. - np.log(reduction_factor + 1) / np.log(2) # apply non-linearity to the reduction factor for a non-linear reduction of the signal. This can be used for a diff --git a/satpy/modifiers/atmosphere.py b/satpy/modifiers/atmosphere.py index e804982330..1c6225f42a 100644 --- a/satpy/modifiers/atmosphere.py +++ b/satpy/modifiers/atmosphere.py @@ -93,28 +93,28 @@ def __call__(self, projectables, optional_datasets=None, **info): ssadiff = compute_relative_azimuth(sata, suna) del sata, suna - atmosphere = self.attrs.get('atmosphere', 'us-standard') - aerosol_type = self.attrs.get('aerosol_type', 'marine_clean_aerosol') - reduce_lim_low = abs(self.attrs.get('reduce_lim_low', 70)) - reduce_lim_high = abs(self.attrs.get('reduce_lim_high', 105)) - reduce_strength = np.clip(self.attrs.get('reduce_strength', 0), 0, 1) + atmosphere = self.attrs.get("atmosphere", "us-standard") + aerosol_type = self.attrs.get("aerosol_type", "marine_clean_aerosol") + reduce_lim_low = abs(self.attrs.get("reduce_lim_low", 70)) + reduce_lim_high = abs(self.attrs.get("reduce_lim_high", 105)) + reduce_strength = np.clip(self.attrs.get("reduce_strength", 0), 0, 1) logger.info("Removing Rayleigh scattering with atmosphere '%s' and " "aerosol type '%s' for '%s'", - atmosphere, aerosol_type, vis.attrs['name']) - corrector = Rayleigh(vis.attrs['platform_name'], vis.attrs['sensor'], + atmosphere, aerosol_type, vis.attrs["name"]) + corrector = Rayleigh(vis.attrs["platform_name"], vis.attrs["sensor"], atmosphere=atmosphere, aerosol_type=aerosol_type) try: refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff, - vis.attrs['name'], + vis.attrs["name"], red.data) except (KeyError, IOError): - logger.warning("Could not get the reflectance correction using band name: %s", vis.attrs['name']) + logger.warning("Could not get the reflectance correction using band name: %s", vis.attrs["name"]) logger.warning("Will try use the wavelength, however, this may be ambiguous!") refl_cor_band = corrector.get_reflectance(sunz, 
satz, ssadiff, - vis.attrs['wavelength'][1], + vis.attrs["wavelength"][1], red.data) if reduce_strength > 0: @@ -155,13 +155,13 @@ def __call__(self, projectables, optional_datasets=None, **info): satz = get_satellite_zenith_angle(band) satz = satz.data # get dask array underneath - logger.info('Correction for limb cooling') - corrector = AtmosphericalCorrection(band.attrs['platform_name'], - band.attrs['sensor']) + logger.info("Correction for limb cooling") + corrector = AtmosphericalCorrection(band.attrs["platform_name"], + band.attrs["sensor"]) atm_corr = da.map_blocks(_call_mapped_correction, satz, band.data, corrector=corrector, - band_name=band.attrs['name'], + band_name=band.attrs["name"], meta=np.array((), dtype=band.dtype)) proj = xr.DataArray(atm_corr, attrs=band.attrs, dims=band.dims, coords=band.coords) @@ -187,7 +187,7 @@ class CO2Corrector(ModifierBase): def __call__(self, projectables, optional_datasets=None, **info): """Apply correction.""" ir_039, ir_108, ir_134 = projectables - logger.info('Applying CO2 correction') + logger.info("Applying CO2 correction") dt_co2 = (ir_108 - ir_134) / 4.0 rcorr = ir_108 ** 4 - (ir_108 - dt_co2) ** 4 t4_co2corr = (ir_039 ** 4 + rcorr).clip(0.0) ** 0.25 diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index 61c406adb6..1194eb036a 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -47,14 +47,14 @@ def __init__(self, max_sza=95.0, **kwargs): def __call__(self, projectables, **info): """Generate the composite.""" - projectables = self.match_data_arrays(list(projectables) + list(info.get('optional_datasets', []))) + projectables = self.match_data_arrays(list(projectables) + list(info.get("optional_datasets", []))) vis = projectables[0] if vis.attrs.get("sunz_corrected"): logger.debug("Sun zenith correction already applied") return vis logger.debug("Applying sun zen correction") - if not info.get('optional_datasets'): + if not info.get("optional_datasets"): # we were not given SZA, generate cos(SZA) logger.debug("Computing sun zenith angles.") from .angles import get_cos_sza diff --git a/satpy/modifiers/spectral.py b/satpy/modifiers/spectral.py index 6fbf695972..e3ea3214b8 100644 --- a/satpy/modifiers/spectral.py +++ b/satpy/modifiers/spectral.py @@ -77,19 +77,19 @@ def _get_reflectance_as_dataarray(self, projectables, optional_datasets): da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) da_sun_zenith = self._get_sun_zenith_from_provided_data(projectables, optional_datasets) - logger.info('Getting reflective part of %s', _nir.attrs['name']) + logger.info("Getting reflective part of %s", _nir.attrs["name"]) reflectance = self._get_reflectance_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, _nir.attrs) proj = self._create_modified_dataarray(reflectance, base_dataarray=_nir) - proj.attrs['units'] = '%' + proj.attrs["units"] = "%" return proj @staticmethod def _get_tb13_4_from_optionals(optional_datasets): tb13_4 = None for dataset in optional_datasets: - wavelengths = dataset.attrs.get('wavelength', [100., 0, 0]) - if (dataset.attrs.get('units') == 'K' and + wavelengths = dataset.attrs.get("wavelength", [100., 0, 0]) + if (dataset.attrs.get("units") == "K" and wavelengths[0] <= 13.4 <= wavelengths[2]): tb13_4 = dataset.data return tb13_4 @@ -108,14 +108,14 @@ def _get_sun_zenith_from_provided_data(projectables, optional_datasets): raise ImportError("Module pyorbital.astronomy needed to compute sun zenith angles.") _nir = projectables[0] lons, lats = 
_nir.attrs["area"].get_lonlats(chunks=_nir.data.chunks) - sun_zenith = sun_zenith_angle(_nir.attrs['start_time'], lons, lats) + sun_zenith = sun_zenith_angle(_nir.attrs["start_time"], lons, lats) return sun_zenith def _create_modified_dataarray(self, reflectance, base_dataarray): proj = xr.DataArray(reflectance, dims=base_dataarray.dims, coords=base_dataarray.coords, attrs=base_dataarray.attrs.copy()) - proj.attrs['sun_zenith_threshold'] = self.sun_zenith_threshold - proj.attrs['sun_zenith_masking_limit'] = self.masking_limit + proj.attrs["sun_zenith_threshold"] = self.sun_zenith_threshold + proj.attrs["sun_zenith_masking_limit"] = self.masking_limit self.apply_modifier_info(base_dataarray, proj) return proj @@ -130,7 +130,7 @@ def _init_reflectance_calculator(self, metadata): logger.info("Couldn't load pyspectral") raise ImportError("No module named pyspectral.near_infrared_reflectance") - reflectance_3x_calculator = Calculator(metadata['platform_name'], metadata['sensor'], metadata['name'], + reflectance_3x_calculator = Calculator(metadata["platform_name"], metadata["sensor"], metadata["name"], sunz_threshold=self.sun_zenith_threshold, masking_limit=self.masking_limit) return reflectance_3x_calculator @@ -169,11 +169,11 @@ def _get_emissivity_as_dataarray(self, projectables, optional_datasets): da_tb13_4 = self._get_tb13_4_from_optionals(optional_datasets) da_sun_zenith = self._get_sun_zenith_from_provided_data(projectables, optional_datasets) - logger.info('Getting emissive part of %s', _nir.attrs['name']) + logger.info("Getting emissive part of %s", _nir.attrs["name"]) emissivity = self._get_emissivity_as_dask(da_nir, da_tb11, da_tb13_4, da_sun_zenith, _nir.attrs) proj = self._create_modified_dataarray(emissivity, base_dataarray=_nir) - proj.attrs['units'] = 'K' + proj.attrs["units"] = "K" return proj def _get_emissivity_as_dask(self, da_nir, da_tb11, da_tb13_4, da_sun_zenith, metadata): diff --git a/satpy/multiscene/_blend_funcs.py b/satpy/multiscene/_blend_funcs.py index 82597aa3fc..49869a0418 100644 --- a/satpy/multiscene/_blend_funcs.py +++ b/satpy/multiscene/_blend_funcs.py @@ -14,7 +14,7 @@ def stack( data_arrays: Sequence[xr.DataArray], weights: Optional[Sequence[xr.DataArray]] = None, combine_times: bool = True, - blend_type: str = 'select_with_weights' + blend_type: str = "select_with_weights" ) -> xr.DataArray: """Combine a series of datasets in different ways. 
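A sketch of how stack() is normally reached through MultiScene.blend; "scenes" and "weights" are assumed to be pre-built lists of resampled Scene objects and matching weight DataArrays:

    from functools import partial
    from satpy.multiscene import MultiScene, stack

    mscn = MultiScene(scenes)
    mscn.load(["overview"])  # any dataset or composite common to all scenes
    blended_scene = mscn.blend(
        blend_function=partial(stack, weights=weights, blend_type="select_with_weights"))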
@@ -143,7 +143,7 @@ def _stack_no_weights( def _combine_stacked_attrs(collected_attrs: Sequence[Mapping], combine_times: bool) -> dict: attrs = combine_metadata(*collected_attrs) - if combine_times and ('start_time' in attrs or 'end_time' in attrs): + if combine_times and ("start_time" in attrs or "end_time" in attrs): new_start, new_end = _get_combined_start_end_times(collected_attrs) if new_start: attrs["start_time"] = new_start @@ -157,10 +157,10 @@ def _get_combined_start_end_times(metadata_objects: Iterable[Mapping]) -> tuple[ start_time = None end_time = None for md_obj in metadata_objects: - if "start_time" in md_obj and (start_time is None or md_obj['start_time'] < start_time): - start_time = md_obj['start_time'] - if "end_time" in md_obj and (end_time is None or md_obj['end_time'] > end_time): - end_time = md_obj['end_time'] + if "start_time" in md_obj and (start_time is None or md_obj["start_time"] < start_time): + start_time = md_obj["start_time"] + if "end_time" in md_obj and (end_time is None or md_obj["end_time"] > end_time): + end_time = md_obj["end_time"] return start_time, end_time @@ -168,7 +168,7 @@ def timeseries(datasets): """Expand dataset with and concatenate by time dimension.""" expanded_ds = [] for ds in datasets: - if 'time' not in ds.dims: + if "time" not in ds.dims: tmp = ds.expand_dims("time") tmp.coords["time"] = pd.DatetimeIndex([ds.attrs["start_time"]]) else: diff --git a/satpy/multiscene/_multiscene.py b/satpy/multiscene/_multiscene.py index c93f5706bc..976fbbbd2a 100644 --- a/satpy/multiscene/_multiscene.py +++ b/satpy/multiscene/_multiscene.py @@ -87,12 +87,12 @@ def _duplicate_dataset_with_group_alias(self, group_id, group_members): alias_id=group_id, ) elif len(member_ids) > 1: - raise ValueError('Cannot add multiple datasets from a scene ' - 'to the same group') + raise ValueError("Cannot add multiple datasets from a scene " + "to the same group") def _get_dataset_id_of_group_members_in_scene(self, group_members): return [ - self.scene[member].attrs['_satpy_id'] + self.scene[member].attrs["_satpy_id"] for member in group_members if member in self.scene ] @@ -281,7 +281,7 @@ def _all_same_area(self, dataset_ids): ds = scn.get(ds_id) if ds is None: continue - all_areas.append(ds.attrs.get('area')) + all_areas.append(ds.attrs.get("area")) all_areas = [area for area in all_areas if area is not None] return all(all_areas[0] == area for area in all_areas[1:]) @@ -314,15 +314,15 @@ def _generate_scene_func(self, gen, func_name, create_new_scene, *args, **kwargs def load(self, *args, **kwargs): """Load the required datasets from the multiple scenes.""" - self._generate_scene_func(self._scenes, 'load', False, *args, **kwargs) + self._generate_scene_func(self._scenes, "load", False, *args, **kwargs) def crop(self, *args, **kwargs): """Crop the multiscene and return a new cropped multiscene.""" - return self._generate_scene_func(self._scenes, 'crop', True, *args, **kwargs) + return self._generate_scene_func(self._scenes, "crop", True, *args, **kwargs) def resample(self, destination=None, **kwargs): """Resample the multiscene.""" - return self._generate_scene_func(self._scenes, 'resample', True, destination=destination, **kwargs) + return self._generate_scene_func(self._scenes, "resample", True, destination=destination, **kwargs) def blend( self, @@ -447,7 +447,7 @@ def save_datasets(self, client=True, batch_size=1, **kwargs): Note ``compute`` can not be provided. 
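Continuing the sketch above, the loaded datasets of every scene can then be written out in batches; the writer and filename pattern here are only examples:

    # Computes 'batch_size' scenes at a time; passing 'compute' raises ValueError
    # as enforced below.
    mscn.save_datasets(writer="geotiff",
                       filename="{name}_{start_time:%Y%m%d_%H%M%S}.tif",
                       client=False, batch_size=2)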
""" - if 'compute' in kwargs: + if "compute" in kwargs: raise ValueError("The 'compute' keyword argument can not be provided.") client = self._get_client(client=client) @@ -466,15 +466,15 @@ def _get_animation_info(self, all_datasets, filename, fill_value=None): first_img = get_enhanced_image(first_dataset) first_img_data = first_img.finalize(fill_value=fill_value)[0] shape = tuple(first_img_data.sizes.get(dim_name) - for dim_name in ('y', 'x', 'bands')) - if fill_value is None and filename.endswith('gif'): + for dim_name in ("y", "x", "bands")) + if fill_value is None and filename.endswith("gif"): log.warning("Forcing fill value to '0' for GIF Luminance images") fill_value = 0 shape = shape[:2] attrs = first_dataset.attrs.copy() - if 'end_time' in last_dataset.attrs: - attrs['end_time'] = last_dataset.attrs['end_time'] + if "end_time" in last_dataset.attrs: + attrs["end_time"] = last_dataset.attrs["end_time"] this_fn = filename.format(**attrs) return this_fn, shape, fill_value @@ -508,7 +508,7 @@ def _get_single_frame(self, ds, enh_args, fill_value): # assume all other shapes are (y, x) # we need arrays grouped by pixel so # transpose if needed - data = data.transpose('y', 'x', 'bands') + data = data.transpose("y", "x", "bands") return data def _get_animation_frames(self, all_datasets, shape, fill_value=None, @@ -603,7 +603,7 @@ def _get_writers_and_frames( first_scene = self.first_scene scenes = iter(self._scene_gen) info_scenes = [first_scene] - if 'end_time' in filename: + if "end_time" in filename: # if we need the last scene to generate the filename # then compute all the scenes so we can figure it out log.debug("Generating scenes to compute end_time for filename") diff --git a/satpy/node.py b/satpy/node.py index f1cf401057..191ec0bbcf 100644 --- a/satpy/node.py +++ b/satpy/node.py @@ -121,8 +121,8 @@ def display(self, previous=0, include_data=False): """Display the node.""" no_data = " (No Data)" if self.data is None else "" return ( - (" +" * previous) + str(self.name) + no_data + '\n' + - ''.join([child.display(previous + 1) for child in self.children])) + (" +" * previous) + str(self.name) + no_data + "\n" + + "".join([child.display(previous + 1) for child in self.children])) def leaves(self, unique=True): """Get the leaves of the tree starting at this root.""" @@ -204,12 +204,12 @@ class ReaderNode(Node): def __init__(self, unique_id, reader_name): """Set up the node.""" - super().__init__(unique_id, data={'reader_name': reader_name}) + super().__init__(unique_id, data={"reader_name": reader_name}) def _copy_name_and_data(self, node_cache): - return ReaderNode(self.name, self.data['reader_name']) + return ReaderNode(self.name, self.data["reader_name"]) @property def reader_name(self): """Get the name of the reader.""" - return self.data['reader_name'] + return self.data["reader_name"] diff --git a/satpy/plugin_base.py b/satpy/plugin_base.py index ee19341796..286b5fc335 100644 --- a/satpy/plugin_base.py +++ b/satpy/plugin_base.py @@ -60,5 +60,5 @@ def __init__(self, default_config_filename=None, config_files=None, **kwargs): def load_yaml_config(self, conf): """Load a YAML configuration file and recursively update the overall configuration.""" - with open(conf, 'r', encoding='utf-8') as fd: + with open(conf, "r", encoding="utf-8") as fd: self.config = recursive_dict_update(self.config, yaml.load(fd, Loader=UnsafeLoader)) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 2b1bbc37ba..81ebf2393b 100644 --- a/satpy/readers/__init__.py +++ 
b/satpy/readers/__init__.py @@ -37,7 +37,7 @@ # Old Name -> New Name -PENDING_OLD_READER_NAMES = {'fci_l1c_fdhsi': 'fci_l1c_nc', 'viirs_l2_cloud_mask_nc': 'viirs_edr'} +PENDING_OLD_READER_NAMES = {"fci_l1c_fdhsi": "fci_l1c_nc", "viirs_l2_cloud_mask_nc": "viirs_edr"} OLD_READER_NAMES: dict[str, str] = {} @@ -171,7 +171,7 @@ def _get_file_keys_for_reader_files(reader_files, group_keys=None): file_keys = {} for (reader_name, (reader_instance, files_to_sort)) in reader_files.items(): if group_keys is None: - group_keys = reader_instance.info.get('group_keys', ('start_time',)) + group_keys = reader_instance.info.get("group_keys", ("start_time",)) file_keys[reader_name] = [] # make a copy because filename_items_for_filetype will modify inplace files_to_sort = set(files_to_sort) @@ -301,7 +301,7 @@ def _get_keys_with_empty_values(grp): def read_reader_config(config_files, loader=UnsafeLoader): """Read the reader `config_files` and return the extracted reader metadata.""" reader_config = load_yaml_reader_configs(*config_files, loader=loader) - return reader_config['reader'] + return reader_config["reader"] def load_reader(reader_configs, **reader_kwargs): @@ -324,16 +324,16 @@ def configs_for_reader(reader=None): reader = get_valid_reader_names(reader) # given a config filename or reader name - config_files = [r if r.endswith('.yaml') else r + '.yaml' for r in reader] + config_files = [r if r.endswith(".yaml") else r + ".yaml" for r in reader] else: - paths = get_entry_points_config_dirs('satpy.readers') - reader_configs = glob_config(os.path.join('readers', '*.yaml'), search_dirs=paths) + paths = get_entry_points_config_dirs("satpy.readers") + reader_configs = glob_config(os.path.join("readers", "*.yaml"), search_dirs=paths) config_files = set(reader_configs) for config_file in config_files: config_basename = os.path.basename(config_file) reader_name = os.path.splitext(config_basename)[0] - paths = get_entry_points_config_dirs('satpy.readers') + paths = get_entry_points_config_dirs("satpy.readers") reader_configs = config_search_paths( os.path.join("readers", config_basename), search_dirs=paths, check_exists=True) @@ -393,9 +393,9 @@ def available_readers(as_dict=False, yaml_loader=UnsafeLoader): LOG.debug("Could not import reader config from: %s", reader_configs) LOG.debug("Error loading YAML", exc_info=True) continue - readers.append(reader_info if as_dict else reader_info['name']) + readers.append(reader_info if as_dict else reader_info["name"]) if as_dict: - readers = sorted(readers, key=lambda reader_info: reader_info['name']) + readers = sorted(readers, key=lambda reader_info: reader_info["name"]) else: readers = sorted(readers) return readers @@ -467,13 +467,13 @@ def find_files_and_readers(start_time=None, end_time=None, base_dir=None, """ reader_files = {} reader_kwargs = reader_kwargs or {} - filter_parameters = filter_parameters or reader_kwargs.get('filter_parameters', {}) + filter_parameters = filter_parameters or reader_kwargs.get("filter_parameters", {}) sensor_supported = False if start_time or end_time: - filter_parameters['start_time'] = start_time - filter_parameters['end_time'] = end_time - reader_kwargs['filter_parameters'] = filter_parameters + filter_parameters["start_time"] = start_time + filter_parameters["end_time"] = end_time + reader_kwargs["filter_parameters"] = filter_parameters for reader_configs in configs_for_reader(reader): (reader_instance, loadables, this_sensor_supported) = _get_loadables_for_reader_config( @@ -509,7 +509,7 @@ def 
_get_loadables_for_reader_config(base_dir, reader, sensor, reader_configs, try: reader_instance = load_reader(reader_configs, **reader_kwargs) except (KeyError, IOError, yaml.YAMLError) as err: - LOG.info('Cannot use %s', str(reader_configs)) + LOG.info("Cannot use %s", str(reader_configs)) LOG.debug(str(err)) if reader and (isinstance(reader, str) or len(reader) == 1): # if it is a single reader then give a more usable error @@ -563,7 +563,7 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): reader_configs, **reader_kwargs[None if reader is None else reader[idx]]) except (KeyError, IOError, yaml.YAMLError) as err: - LOG.info('Cannot use %s', str(reader_configs)) + LOG.info("Cannot use %s", str(reader_configs)) LOG.debug(str(err)) continue @@ -644,7 +644,7 @@ def _get_reader_kwargs(reader, reader_kwargs): reader_kwargs_without_filter = {} for (k, v) in reader_kwargs.items(): reader_kwargs_without_filter[k] = v.copy() - reader_kwargs_without_filter[k].pop('filter_parameters', None) + reader_kwargs_without_filter[k].pop("filter_parameters", None) return (reader_kwargs, reader_kwargs_without_filter) diff --git a/satpy/readers/_geos_area.py b/satpy/readers/_geos_area.py index 0775e51381..03dabfa9a0 100644 --- a/satpy/readers/_geos_area.py +++ b/satpy/readers/_geos_area.py @@ -72,6 +72,7 @@ def get_area_extent(pdict): coff: Column offset factor loff: Line offset factor scandir: 'N2S' for standard (N->S), 'S2N' for inverse (S->N) + Returns: aex: An area extent for the scene @@ -79,7 +80,7 @@ def get_area_extent(pdict): # count starts at 1 cols = 1 - 0.5 - if pdict['scandir'] == 'S2N': + if pdict["scandir"] == "S2N": lines = 0.5 - 1 scanmult = -1 else: @@ -88,22 +89,22 @@ def get_area_extent(pdict): # Lower left x, y scanning angles in degrees ll_x, ll_y = get_xy_from_linecol(lines * scanmult, cols, - (pdict['loff'], pdict['coff']), - (pdict['lfac'], pdict['cfac'])) + (pdict["loff"], pdict["coff"]), + (pdict["lfac"], pdict["cfac"])) - cols += pdict['ncols'] - lines += pdict['nlines'] + cols += pdict["ncols"] + lines += pdict["nlines"] # Upper right x, y scanning angles in degrees ur_x, ur_y = get_xy_from_linecol(lines * scanmult, cols, - (pdict['loff'], pdict['coff']), - (pdict['lfac'], pdict['cfac'])) - if pdict['scandir'] == 'S2N': + (pdict["loff"], pdict["coff"]), + (pdict["lfac"], pdict["cfac"])) + if pdict["scandir"] == "S2N": ll_y *= -1 ur_y *= -1 # Convert degrees to radians and create area extent - aex = make_ext(ll_x=ll_x, ur_x=ur_x, ll_y=ll_y, ur_y=ur_y, h=pdict['h']) + aex = make_ext(ll_x=ll_x, ur_x=ur_x, ll_y=ll_y, ur_y=ur_y, h=pdict["h"]) return aex @@ -132,20 +133,20 @@ def get_area_definition(pdict, a_ext): The AreaDefinition `proj_id` attribute is being deprecated. """ - proj_dict = {'a': float(pdict['a']), - 'b': float(pdict['b']), - 'lon_0': float(pdict['ssp_lon']), - 'h': float(pdict['h']), - 'proj': 'geos', - 'units': 'm'} + proj_dict = {"a": float(pdict["a"]), + "b": float(pdict["b"]), + "lon_0": float(pdict["ssp_lon"]), + "h": float(pdict["h"]), + "proj": "geos", + "units": "m"} a_def = geometry.AreaDefinition( - pdict['a_name'], - pdict['a_desc'], - pdict['p_id'], + pdict["a_name"], + pdict["a_desc"], + pdict["p_id"], proj_dict, - int(pdict['ncols']), - int(pdict['nlines']), + int(pdict["ncols"]), + int(pdict["nlines"]), a_ext) return a_def @@ -178,6 +179,7 @@ def get_geos_area_naming(input_dict): input_dict: dict Dictionary with keys `platform_name`, `instrument_name`, `service_name`, `service_desc`, `resolution` . The resolution is expected in meters. 
+ Returns: area_naming_dict with `area_id`, `description` keys, values are strings. @@ -189,21 +191,21 @@ def get_geos_area_naming(input_dict): """ area_naming_dict = {} - resolution_strings = get_resolution_and_unit_strings(input_dict['resolution']) + resolution_strings = get_resolution_and_unit_strings(input_dict["resolution"]) - area_naming_dict['area_id'] = '{}_{}_{}_{}{}'.format(input_dict['platform_name'].lower(), - input_dict['instrument_name'].lower(), - input_dict['service_name'].lower(), - resolution_strings['value'], - resolution_strings['unit'] + area_naming_dict["area_id"] = "{}_{}_{}_{}{}".format(input_dict["platform_name"].lower(), + input_dict["instrument_name"].lower(), + input_dict["service_name"].lower(), + resolution_strings["value"], + resolution_strings["unit"] ) - area_naming_dict['description'] = '{} {} {} area definition ' \ - 'with {} {} resolution'.format(input_dict['platform_name'].upper(), - input_dict['instrument_name'].upper(), - input_dict['service_desc'], - resolution_strings['value'], - resolution_strings['unit'] + area_naming_dict["description"] = "{} {} {} area definition " \ + "with {} {} resolution".format(input_dict["platform_name"].upper(), + input_dict["instrument_name"].upper(), + input_dict["service_desc"], + resolution_strings["value"], + resolution_strings["unit"] ) return area_naming_dict @@ -222,8 +224,8 @@ def get_resolution_and_unit_strings(resolution): Dictionary with `value` and `unit` keys, values are strings. """ if resolution >= 1000: - return {'value': '{:.0f}'.format(resolution*1e-3), - 'unit': 'km'} + return {"value": "{:.0f}".format(resolution*1e-3), + "unit": "km"} - return {'value': '{:.0f}'.format(resolution), - 'unit': 'm'} + return {"value": "{:.0f}".format(resolution), + "unit": "m"} diff --git a/satpy/readers/aapp_l1b.py b/satpy/readers/aapp_l1b.py index ea3877e48a..e502a9da64 100644 --- a/satpy/readers/aapp_l1b.py +++ b/satpy/readers/aapp_l1b.py @@ -54,24 +54,24 @@ def get_aapp_chunks(shape): AVHRR_CHANNEL_NAMES = ["1", "2", "3a", "3b", "4", "5"] -AVHRR_ANGLE_NAMES = ['sensor_zenith_angle', - 'solar_zenith_angle', - 'sun_sensor_azimuth_difference_angle'] +AVHRR_ANGLE_NAMES = ["sensor_zenith_angle", + "solar_zenith_angle", + "sun_sensor_azimuth_difference_angle"] -AVHRR_PLATFORM_IDS2NAMES = {4: 'NOAA-15', - 2: 'NOAA-16', - 6: 'NOAA-17', - 7: 'NOAA-18', - 8: 'NOAA-19', - 11: 'Metop-B', - 12: 'Metop-A', - 13: 'Metop-C', - 14: 'Metop simulator'} +AVHRR_PLATFORM_IDS2NAMES = {4: "NOAA-15", + 2: "NOAA-16", + 6: "NOAA-17", + 7: "NOAA-18", + 8: "NOAA-19", + 11: "Metop-B", + 12: "Metop-A", + 13: "Metop-C", + 14: "Metop simulator"} def create_xarray(arr): """Create an `xarray.DataArray`.""" - res = xr.DataArray(arr, dims=['y', 'x']) + res = xr.DataArray(arr, dims=["y", "x"]) return res @@ -102,30 +102,30 @@ def _set_filedata_layout(self): @property def start_time(self): """Get the time of the first observation.""" - return datetime(self._data['scnlinyr'][0], 1, 1) + timedelta( - days=int(self._data['scnlindy'][0]) - 1, - milliseconds=int(self._data['scnlintime'][0])) + return datetime(self._data["scnlinyr"][0], 1, 1) + timedelta( + days=int(self._data["scnlindy"][0]) - 1, + milliseconds=int(self._data["scnlintime"][0])) @property def end_time(self): """Get the time of the final observation.""" - return datetime(self._data['scnlinyr'][-1], 1, 1) + timedelta( - days=int(self._data['scnlindy'][-1]) - 1, - milliseconds=int(self._data['scnlintime'][-1])) + return datetime(self._data["scnlinyr"][-1], 1, 1) + timedelta( + 
days=int(self._data["scnlindy"][-1]) - 1, + milliseconds=int(self._data["scnlintime"][-1])) def _update_dataset_attributes(self, dataset, key, info): - dataset.attrs.update({'platform_name': self.platform_name, - 'sensor': self.sensor}) + dataset.attrs.update({"platform_name": self.platform_name, + "sensor": self.sensor}) dataset.attrs.update(key.to_dict()) - for meta_key in ('standard_name', 'units'): + for meta_key in ("standard_name", "units"): if meta_key in info: dataset.attrs.setdefault(meta_key, info[meta_key]) def _get_platform_name(self, platform_names_lookup): """Get the platform name from the file header.""" - self.platform_name = platform_names_lookup.get(self._header['satid'][0], None) + self.platform_name = platform_names_lookup.get(self._header["satid"][0], None) if self.platform_name is None: - raise ValueError("Unsupported platform ID: %d" % self.header['satid']) + raise ValueError("Unsupported platform ID: %d" % self.header["satid"]) def read(self): """Read the data.""" @@ -143,17 +143,17 @@ def _calibrate_active_channel_data(self, key): def get_dataset(self, key, info): """Get a dataset from the file.""" - if key['name'] in self._channel_names: + if key["name"] in self._channel_names: dataset = self._calibrate_active_channel_data(key) if dataset is None: return None - elif key['name'] in ['longitude', 'latitude']: - dataset = self.navigate(key['name']) + elif key["name"] in ["longitude", "latitude"]: + dataset = self.navigate(key["name"]) dataset.attrs = info - elif key['name'] in self._angle_names: - dataset = self.get_angles(key['name']) + elif key["name"] in self._angle_names: + dataset = self.get_angles(key["name"]) else: - raise ValueError("Not a supported dataset: %s", key['name']) + raise ValueError("Not a supported dataset: %s", key["name"]) self._update_dataset_attributes(dataset, key, info) return dataset @@ -168,7 +168,7 @@ def __init__(self, filename, filename_info, filetype_info): filetype_info) self.channels = {i: None for i in AVHRR_CHANNEL_NAMES} - self.units = {i: 'counts' for i in AVHRR_CHANNEL_NAMES} + self.units = {i: "counts" for i in AVHRR_CHANNEL_NAMES} self._is3b = None self._is3a = None @@ -181,7 +181,7 @@ def __init__(self, filename, filename_info, filetype_info): self.active_channels = self._get_active_channels() self._get_platform_name(AVHRR_PLATFORM_IDS2NAMES) - self.sensor = 'avhrr-3' + self.sensor = "avhrr-3" self._get_all_interpolated_angles = functools.lru_cache(maxsize=10)( self._get_all_interpolated_angles_uncached @@ -202,25 +202,25 @@ def _get_active_channels(self): def _calibrate_active_channel_data(self, key): """Calibrate active channel data only.""" - if self.active_channels[key['name']]: + if self.active_channels[key["name"]]: return self.calibrate(key) return None def _get_channel_binary_status_from_header(self): - status = self._header['inststat1'].item() - change_line = self._header['statchrecnb'] + status = self._header["inststat1"].item() + change_line = self._header["statchrecnb"] if change_line > 0: - status |= self._header['inststat2'].item() + status |= self._header["inststat2"].item() return status @staticmethod def _convert_binary_channel_status_to_activation_dict(status): - bits_channels = ((13, '1'), - (12, '2'), - (11, '3a'), - (10, '3b'), - (9, '4'), - (8, '5')) + bits_channels = ((13, "1"), + (12, "2"), + (11, "3a"), + (10, "3b"), + (9, "4"), + (8, "5")) activated = dict() for bit, channel_name in bits_channels: activated[channel_name] = bool(status >> bit & 1) @@ -229,8 +229,8 @@ def 
_convert_binary_channel_status_to_activation_dict(status): def available_datasets(self, configured_datasets=None): """Get the available datasets.""" for _, mda in configured_datasets: - if mda['name'] in self._channel_names: - yield self.active_channels[mda['name']], mda + if mda["name"] in self._channel_names: + yield self.active_channels[mda["name"]], mda else: yield True, mda @@ -285,9 +285,9 @@ def _create_40km_interpolator(lines, *arrays_40km, geolocation=False): def navigate(self, coordinate_id): """Get the longitudes and latitudes of the scene.""" lons, lats = self._get_all_interpolated_coordinates() - if coordinate_id == 'longitude': + if coordinate_id == "longitude": return create_xarray(lons) - if coordinate_id == 'latitude': + if coordinate_id == "latitude": return create_xarray(lats) raise KeyError("Coordinate {} unknown.".format(coordinate_id)) @@ -309,49 +309,49 @@ def calibrate(self, if calib_coeffs is None: calib_coeffs = {} - units = {'reflectance': '%', - 'brightness_temperature': 'K', - 'counts': '', - 'radiance': 'W*m-2*sr-1*cm ?'} + units = {"reflectance": "%", + "brightness_temperature": "K", + "counts": "", + "radiance": "W*m-2*sr-1*cm ?"} - if dataset_id['name'] in ("3a", "3b") and self._is3b is None: + if dataset_id["name"] in ("3a", "3b") and self._is3b is None: # Is it 3a or 3b: line_chunks = get_aapp_chunks((self._data.shape[0], 2048))[0] - self._is3a = da.bitwise_and(da.from_array(self._data['scnlinbit'], + self._is3a = da.bitwise_and(da.from_array(self._data["scnlinbit"], chunks=line_chunks), 3) == 0 - self._is3b = da.bitwise_and(da.from_array(self._data['scnlinbit'], + self._is3b = da.bitwise_and(da.from_array(self._data["scnlinbit"], chunks=line_chunks), 3) == 1 try: - vis_idx = ['1', '2', '3a'].index(dataset_id['name']) + vis_idx = ["1", "2", "3a"].index(dataset_id["name"]) ir_idx = None except ValueError: vis_idx = None - ir_idx = ['3b', '4', '5'].index(dataset_id['name']) + ir_idx = ["3b", "4", "5"].index(dataset_id["name"]) mask = True if vis_idx is not None: - coeffs = calib_coeffs.get('ch' + dataset_id['name']) - if dataset_id['name'] == '3a': + coeffs = calib_coeffs.get("ch" + dataset_id["name"]) + if dataset_id["name"] == "3a": mask = self._is3a[:, None] ds = create_xarray( _vis_calibrate(self._data, vis_idx, - dataset_id['calibration'], + dataset_id["calibration"], pre_launch_coeffs, coeffs, mask=mask)) else: - if dataset_id['name'] == '3b': + if dataset_id["name"] == "3b": mask = self._is3b[:, None] ds = create_xarray( _ir_calibrate(self._header, self._data, ir_idx, - dataset_id['calibration'], + dataset_id["calibration"], mask=mask)) - ds.attrs['units'] = units[dataset_id['calibration']] + ds.attrs["units"] = units[dataset_id["calibration"]] ds.attrs.update(dataset_id._asdict()) return ds @@ -545,8 +545,8 @@ def _vis_calibrate(data, """ # Calibration count to albedo, the calibration is performed separately for # two value ranges. 
- if calib_type not in ['counts', 'radiance', 'reflectance']: - raise ValueError('Calibration ' + calib_type + ' unknown!') + if calib_type not in ["counts", "radiance", "reflectance"]: + raise ValueError("Calibration " + calib_type + " unknown!") channel_data = data["hrpt"][:, :, chn] chunks = get_aapp_chunks(channel_data.shape) @@ -554,12 +554,12 @@ def _vis_calibrate(data, channel = da.from_array(channel_data, chunks=chunks) mask &= channel != 0 - if calib_type == 'counts': + if calib_type == "counts": return channel channel = channel.astype(CHANNEL_DTYPE) - if calib_type == 'radiance': + if calib_type == "radiance": logger.info("Radiances are not yet supported for " + "the VIS/NIR channels!") @@ -630,9 +630,9 @@ def _ir_calibrate(header, data, irchn, calib_type, mask=True): mask &= count != 0 count = count.astype(CHANNEL_DTYPE) - k1_ = da.from_array(data['calir'][:, irchn, 0, 0], chunks=line_chunks) / 1.0e9 - k2_ = da.from_array(data['calir'][:, irchn, 0, 1], chunks=line_chunks) / 1.0e6 - k3_ = da.from_array(data['calir'][:, irchn, 0, 2], chunks=line_chunks) / 1.0e6 + k1_ = da.from_array(data["calir"][:, irchn, 0, 0], chunks=line_chunks) / 1.0e9 + k2_ = da.from_array(data["calir"][:, irchn, 0, 1], chunks=line_chunks) / 1.0e6 + k3_ = da.from_array(data["calir"][:, irchn, 0, 2], chunks=line_chunks) / 1.0e6 # Count to radiance conversion: rad = k1_[:, None] * count * count + k2_[:, None] * count + k3_[:, None] @@ -645,14 +645,14 @@ def _ir_calibrate(header, data, irchn, calib_type, mask=True): return da.where(mask, rad, np.nan) # Central wavenumber: - cwnum = header['radtempcnv'][0, irchn, 0] + cwnum = header["radtempcnv"][0, irchn, 0] if irchn == 0: cwnum = cwnum / 1.0e2 else: cwnum = cwnum / 1.0e3 - bandcor_2 = header['radtempcnv'][0, irchn, 1] / 1e5 - bandcor_3 = header['radtempcnv'][0, irchn, 2] / 1e6 + bandcor_2 = header["radtempcnv"][0, irchn, 1] / 1e5 + bandcor_3 = header["radtempcnv"][0, irchn, 2] / 1e6 ir_const_1 = 1.1910659e-5 ir_const_2 = 1.438833 diff --git a/satpy/readers/aapp_mhs_amsub_l1c.py b/satpy/readers/aapp_mhs_amsub_l1c.py index f5765545f3..a05d70e501 100644 --- a/satpy/readers/aapp_mhs_amsub_l1c.py +++ b/satpy/readers/aapp_mhs_amsub_l1c.py @@ -36,21 +36,21 @@ CHUNK_SIZE = get_legacy_chunk_size() LINE_CHUNK = CHUNK_SIZE ** 2 // 90 -MHS_AMSUB_CHANNEL_NAMES = ['1', '2', '3', '4', '5'] -MHS_AMSUB_ANGLE_NAMES = ['sensor_zenith_angle', 'sensor_azimuth_angle', - 'solar_zenith_angle', 'solar_azimuth_difference_angle'] +MHS_AMSUB_CHANNEL_NAMES = ["1", "2", "3", "4", "5"] +MHS_AMSUB_ANGLE_NAMES = ["sensor_zenith_angle", "sensor_azimuth_angle", + "solar_zenith_angle", "solar_azimuth_difference_angle"] -MHS_AMSUB_PLATFORM_IDS2NAMES = {15: 'NOAA-15', - 16: 'NOAA-16', - 17: 'NOAA-17', - 18: 'NOAA-18', - 19: 'NOAA-19', - 1: 'Metop-B', - 2: 'Metop-A', - 3: 'Metop-C', - 4: 'Metop simulator'} +MHS_AMSUB_PLATFORM_IDS2NAMES = {15: "NOAA-15", + 16: "NOAA-16", + 17: "NOAA-17", + 18: "NOAA-18", + 19: "NOAA-19", + 1: "Metop-B", + 2: "Metop-A", + 3: "Metop-C", + 4: "Metop simulator"} -MHS_AMSUB_PLATFORMS = ['Metop-A', 'Metop-B', 'Metop-C', 'NOAA-18', 'NOAA-19'] +MHS_AMSUB_PLATFORMS = ["Metop-A", "Metop-B", "Metop-C", "NOAA-18", "NOAA-19"] class MHS_AMSUB_AAPPL1CFile(AAPPL1BaseFileHandler): @@ -61,7 +61,7 @@ def __init__(self, filename, filename_info, filetype_info): super().__init__(filename, filename_info, filetype_info) self.channels = {i: None for i in MHS_AMSUB_CHANNEL_NAMES} - self.units = {i: 'brightness_temperature' for i in MHS_AMSUB_CHANNEL_NAMES} + self.units = {i: 
"brightness_temperature" for i in MHS_AMSUB_CHANNEL_NAMES} self._channel_names = MHS_AMSUB_CHANNEL_NAMES self._angle_names = MHS_AMSUB_ANGLE_NAMES @@ -80,10 +80,10 @@ def _set_filedata_layout(self): def _get_sensorname(self): """Get the sensor name from the header.""" - if self._header['instrument'][0] == 11: - self.sensor = 'amsub' - elif self._header['instrument'][0] == 12: - self.sensor = 'mhs' + if self._header["instrument"][0] == 11: + self.sensor = "amsub" + elif self._header["instrument"][0] == 12: + self.sensor = "mhs" else: raise IOError("Sensor neither MHS nor AMSU-B!") @@ -101,9 +101,9 @@ def get_angles(self, angle_id): def navigate(self, coordinate_id): """Get the longitudes and latitudes of the scene.""" lons, lats = self._get_coordinates_in_degrees() - if coordinate_id == 'longitude': + if coordinate_id == "longitude": return create_xarray(lons) - if coordinate_id == 'latitude': + if coordinate_id == "latitude": return create_xarray(lats) raise KeyError("Coordinate {} unknown.".format(coordinate_id)) @@ -119,17 +119,17 @@ def _calibrate_active_channel_data(self, key): def calibrate(self, dataset_id): """Calibrate the data.""" - units = {'brightness_temperature': 'K'} + units = {"brightness_temperature": "K"} mask = True - idx = ['1', '2', '3', '4', '5'].index(dataset_id['name']) + idx = ["1", "2", "3", "4", "5"].index(dataset_id["name"]) ds = create_xarray( _calibrate(self._data, idx, - dataset_id['calibration'], + dataset_id["calibration"], mask=mask)) - ds.attrs['units'] = units[dataset_id['calibration']] + ds.attrs["units"] = units[dataset_id["calibration"]] ds.attrs.update(dataset_id._asdict()) return ds @@ -143,13 +143,13 @@ def _calibrate(data, *calib_type* in brightness_temperature. """ - if calib_type not in ['brightness_temperature']: - raise ValueError('Calibration ' + calib_type + ' unknown!') + if calib_type not in ["brightness_temperature"]: + raise ValueError("Calibration " + calib_type + " unknown!") channel = da.from_array(data["btemps"][:, :, chn] / 100., chunks=(LINE_CHUNK, 90)) mask &= channel != 0 - if calib_type == 'counts': + if calib_type == "counts": return channel channel = channel.astype(np.float64) diff --git a/satpy/readers/abi_base.py b/satpy/readers/abi_base.py index 4a6bf069c1..0b80045767 100644 --- a/satpy/readers/abi_base.py +++ b/satpy/readers/abi_base.py @@ -34,13 +34,13 @@ CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_NAMES = { - 'g16': 'GOES-16', - 'g17': 'GOES-17', - 'g18': 'GOES-18', - 'g19': 'GOES-19', - 'goes16': 'GOES-16', - 'goes17': 'GOES-17', - 'goes18': 'GOES-18', + "g16": "GOES-16", + "g17": "GOES-17", + "g18": "GOES-18", + "g19": "GOES-19", + "goes16": "GOES-16", + "goes17": "GOES-17", + "goes18": "GOES-18", } @@ -51,11 +51,11 @@ def __init__(self, filename, filename_info, filetype_info): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super(NC_ABI_BASE, self).__init__(filename, filename_info, filetype_info) - platform_shortname = filename_info['platform_shortname'] + platform_shortname = filename_info["platform_shortname"] self.platform_name = PLATFORM_NAMES.get(platform_shortname.lower()) - self.nlines = self.nc['y'].size - self.ncols = self.nc['x'].size + self.nlines = self.nc["y"].size + self.ncols = self.nc["x"].size self.coords = {} @@ -67,28 +67,28 @@ def nc(self): nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, - chunks={'x': CHUNK_SIZE, 'y': CHUNK_SIZE}, ) + chunks={"x": CHUNK_SIZE, "y": CHUNK_SIZE}, ) except ValueError: nc = xr.open_dataset(f_obj, decode_cf=True, 
mask_and_scale=False, - chunks={'lon': CHUNK_SIZE, 'lat': CHUNK_SIZE}, ) + chunks={"lon": CHUNK_SIZE, "lat": CHUNK_SIZE}, ) nc = self._rename_dims(nc) return nc @staticmethod def _rename_dims(nc): - if 't' in nc.dims or 't' in nc.coords: - nc = nc.rename({'t': 'time'}) - if 'goes_lat_lon_projection' in nc: + if "t" in nc.dims or "t" in nc.coords: + nc = nc.rename({"t": "time"}) + if "goes_lat_lon_projection" in nc: with suppress(ValueError): - nc = nc.rename({'lon': 'x', 'lat': 'y'}) + nc = nc.rename({"lon": "x", "lat": "y"}) return nc @property def sensor(self): """Get sensor name for current file handler.""" - return 'abi' + return "abi" def __getitem__(self, item): """Wrap `self.nc[item]` for better floating point precision. @@ -110,21 +110,21 @@ def __getitem__(self, item): def _adjust_data(self, data, item): """Adjust data with typing, scaling and filling.""" - factor = data.attrs.get('scale_factor', 1) - offset = data.attrs.get('add_offset', 0) - fill = data.attrs.get('_FillValue') - unsigned = data.attrs.get('_Unsigned', None) + factor = data.attrs.get("scale_factor", 1) + offset = data.attrs.get("add_offset", 0) + fill = data.attrs.get("_FillValue") + unsigned = data.attrs.get("_Unsigned", None) def is_int(val): - return np.issubdtype(val.dtype, np.integer) if hasattr(val, 'dtype') else isinstance(val, int) + return np.issubdtype(val.dtype, np.integer) if hasattr(val, "dtype") else isinstance(val, int) # Ref. GOESR PUG-L1B-vol3, section 5.0.2 Unsigned Integer Processing - if unsigned is not None and unsigned.lower() == 'true': + if unsigned is not None and unsigned.lower() == "true": # cast the data from int to uint - data = data.astype('u%s' % data.dtype.itemsize) + data = data.astype("u%s" % data.dtype.itemsize) if fill is not None: - fill = fill.astype('u%s' % fill.dtype.itemsize) + fill = fill.astype("u%s" % fill.dtype.itemsize) if fill is not None: # Some backends (h5netcdf) may return attributes as shape (1,) # arrays rather than shape () scalars, which according to the netcdf @@ -138,7 +138,7 @@ def is_int(val): else: new_fill = np.nan data = data.where(data != fill, new_fill) - if factor != 1 and item in ('x', 'y'): + if factor != 1 and item in ("x", "y"): # be more precise with x/y coordinates # see get_area_def for more information data = data * np.round(float(factor), 6) + np.round(float(offset), 6) @@ -157,7 +157,7 @@ def _adjust_coords(self, data, item): # 'time' dimension causes issues in other processing # 'x_image' and 'y_image' are confusing to some users and unnecessary # 'x' and 'y' will be overwritten by base class AreaDefinition - for coord_name in ('x_image', 'y_image', 'time', 'x', 'y'): + for coord_name in ("x_image", "y_image", "time", "x", "y"): if coord_name in data.coords: data = data.drop_vars(coord_name) if item in data.coords: @@ -175,44 +175,44 @@ def get_dataset(self, key, info): def get_area_def(self, key): """Get the area definition of the data at hand.""" - if 'goes_imager_projection' in self.nc: + if "goes_imager_projection" in self.nc: return self._get_areadef_fixedgrid(key) - if 'goes_lat_lon_projection' in self.nc: + if "goes_lat_lon_projection" in self.nc: return self._get_areadef_latlon(key) - raise ValueError('Unsupported projection found in the dataset') + raise ValueError("Unsupported projection found in the dataset") def _get_areadef_latlon(self, key): """Get the area definition of the data at hand.""" projection = self.nc["goes_lat_lon_projection"] - a = projection.attrs['semi_major_axis'] - b = projection.attrs['semi_minor_axis'] - fi 
= projection.attrs['inverse_flattening'] - pm = projection.attrs['longitude_of_prime_meridian'] + a = projection.attrs["semi_major_axis"] + b = projection.attrs["semi_minor_axis"] + fi = projection.attrs["inverse_flattening"] + pm = projection.attrs["longitude_of_prime_meridian"] proj_ext = self.nc["geospatial_lat_lon_extent"] - w_lon = proj_ext.attrs['geospatial_westbound_longitude'] - e_lon = proj_ext.attrs['geospatial_eastbound_longitude'] - n_lat = proj_ext.attrs['geospatial_northbound_latitude'] - s_lat = proj_ext.attrs['geospatial_southbound_latitude'] + w_lon = proj_ext.attrs["geospatial_westbound_longitude"] + e_lon = proj_ext.attrs["geospatial_eastbound_longitude"] + n_lat = proj_ext.attrs["geospatial_northbound_latitude"] + s_lat = proj_ext.attrs["geospatial_southbound_latitude"] - lat_0 = proj_ext.attrs['geospatial_lat_center'] - lon_0 = proj_ext.attrs['geospatial_lon_center'] + lat_0 = proj_ext.attrs["geospatial_lat_center"] + lon_0 = proj_ext.attrs["geospatial_lon_center"] area_extent = (w_lon, s_lat, e_lon, n_lat) - proj_dict = {'proj': 'latlong', - 'lon_0': float(lon_0), - 'lat_0': float(lat_0), - 'a': float(a), - 'b': float(b), - 'fi': float(fi), - 'pm': float(pm)} + proj_dict = {"proj": "latlong", + "lon_0": float(lon_0), + "lat_0": float(lat_0), + "a": float(a), + "b": float(b), + "fi": float(fi), + "pm": float(pm)} ll_area_def = geometry.AreaDefinition( - self.nc.attrs.get('orbital_slot', 'abi_geos'), - self.nc.attrs.get('spatial_resolution', 'ABI file area'), - 'abi_latlon', + self.nc.attrs.get("orbital_slot", "abi_geos"), + self.nc.attrs.get("spatial_resolution", "ABI file area"), + "abi_latlon", proj_dict, self.ncols, self.nlines, @@ -231,17 +231,17 @@ def _get_areadef_fixedgrid(self, key): """ projection = self.nc["goes_imager_projection"] - a = projection.attrs['semi_major_axis'] - b = projection.attrs['semi_minor_axis'] - h = projection.attrs['perspective_point_height'] + a = projection.attrs["semi_major_axis"] + b = projection.attrs["semi_minor_axis"] + h = projection.attrs["perspective_point_height"] - lon_0 = projection.attrs['longitude_of_projection_origin'] - sweep_axis = projection.attrs['sweep_angle_axis'][0] + lon_0 = projection.attrs["longitude_of_projection_origin"] + sweep_axis = projection.attrs["sweep_angle_axis"][0] # compute x and y extents in m h = np.float64(h) - x = self['x'] - y = self['y'] + x = self["x"] + y = self["y"] x_l = x[0].values x_r = x[-1].values y_l = y[-1].values @@ -251,18 +251,18 @@ def _get_areadef_fixedgrid(self, key): area_extent = (x_l - x_half, y_l - y_half, x_r + x_half, y_u + y_half) area_extent = tuple(np.round(h * val, 6) for val in area_extent) - proj_dict = {'proj': 'geos', - 'lon_0': float(lon_0), - 'a': float(a), - 'b': float(b), - 'h': h, - 'units': 'm', - 'sweep': sweep_axis} + proj_dict = {"proj": "geos", + "lon_0": float(lon_0), + "a": float(a), + "b": float(b), + "h": h, + "units": "m", + "sweep": sweep_axis} fg_area_def = geometry.AreaDefinition( - self.nc.attrs.get('orbital_slot', 'abi_geos'), - self.nc.attrs.get('spatial_resolution', 'ABI file area'), - 'abi_fixed_grid', + self.nc.attrs.get("orbital_slot", "abi_geos"), + self.nc.attrs.get("spatial_resolution", "ABI file area"), + "abi_fixed_grid", proj_dict, self.ncols, self.nlines, @@ -273,19 +273,19 @@ def _get_areadef_fixedgrid(self, key): @property def start_time(self): """Start time of the current file's observations.""" - return datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y-%m-%dT%H:%M:%S.%fZ') + return 
datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """End time of the current file's observations.""" - return datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%S.%fZ") def spatial_resolution_to_number(self): """Convert the 'spatial_resolution' global attribute to meters.""" - res = self.nc.attrs['spatial_resolution'].split(' ')[0] - if res.endswith('km'): + res = self.nc.attrs["spatial_resolution"].split(" ")[0] + if res.endswith("km"): res = int(float(res[:-2]) * 1000) - elif res.endswith('m'): + elif res.endswith("m"): res = int(res[:-1]) else: raise ValueError("Unexpected 'spatial_resolution' attribute '{}'".format(res)) diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py index dafdc8a373..3a22397cde 100644 --- a/satpy/readers/abi_l1b.py +++ b/satpy/readers/abi_l1b.py @@ -44,70 +44,70 @@ def __init__(self, filename, filename_info, filetype_info, clip_negative_radianc def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading in get_dataset %s.', key['name']) + logger.debug("Reading in get_dataset %s.", key["name"]) # For raw cal, don't apply scale and offset, return raw file counts - if key['calibration'] == 'counts': - radiances = self.nc['Rad'].copy() + if key["calibration"] == "counts": + radiances = self.nc["Rad"].copy() else: - radiances = self['Rad'] + radiances = self["Rad"] # mapping of calibration types to calibration functions cal_dictionary = { - 'reflectance': self._vis_calibrate, - 'brightness_temperature': self._ir_calibrate, - 'radiance': self._rad_calibrate, - 'counts': self._raw_calibrate, + "reflectance": self._vis_calibrate, + "brightness_temperature": self._ir_calibrate, + "radiance": self._rad_calibrate, + "counts": self._raw_calibrate, } try: - func = cal_dictionary[key['calibration']] + func = cal_dictionary[key["calibration"]] res = func(radiances) except KeyError: - raise ValueError("Unknown calibration '{}'".format(key['calibration'])) + raise ValueError("Unknown calibration '{}'".format(key["calibration"])) # convert to satpy standard units - if res.attrs['units'] == '1' and key['calibration'] != 'counts': + if res.attrs["units"] == "1" and key["calibration"] != "counts": res *= 100 - res.attrs['units'] = '%' + res.attrs["units"] = "%" self._adjust_attrs(res, key) return res def _adjust_attrs(self, data, key): - data.attrs.update({'platform_name': self.platform_name, - 'sensor': self.sensor}) + data.attrs.update({"platform_name": self.platform_name, + "sensor": self.sensor}) # Add orbital parameters projection = self.nc["goes_imager_projection"] - data.attrs['orbital_parameters'] = { - 'projection_longitude': float(projection.attrs['longitude_of_projection_origin']), - 'projection_latitude': float(projection.attrs['latitude_of_projection_origin']), - 'projection_altitude': float(projection.attrs['perspective_point_height']), - 'satellite_nominal_latitude': float(self['nominal_satellite_subpoint_lat']), - 'satellite_nominal_longitude': float(self['nominal_satellite_subpoint_lon']), - 'satellite_nominal_altitude': float(self['nominal_satellite_height']) * 1000., - 'yaw_flip': bool(self['yaw_flip_flag']), + data.attrs["orbital_parameters"] = { + "projection_longitude": float(projection.attrs["longitude_of_projection_origin"]), + "projection_latitude": float(projection.attrs["latitude_of_projection_origin"]), + "projection_altitude": 
float(projection.attrs["perspective_point_height"]), + "satellite_nominal_latitude": float(self["nominal_satellite_subpoint_lat"]), + "satellite_nominal_longitude": float(self["nominal_satellite_subpoint_lon"]), + "satellite_nominal_altitude": float(self["nominal_satellite_height"]) * 1000., + "yaw_flip": bool(self["yaw_flip_flag"]), } data.attrs.update(key.to_dict()) # remove attributes that could be confusing later # if calibration type is raw counts, we leave them in - if key['calibration'] != 'counts': - data.attrs.pop('_FillValue', None) - data.attrs.pop('scale_factor', None) - data.attrs.pop('add_offset', None) - data.attrs.pop('_Unsigned', None) - data.attrs.pop('ancillary_variables', None) # Can't currently load DQF + if key["calibration"] != "counts": + data.attrs.pop("_FillValue", None) + data.attrs.pop("scale_factor", None) + data.attrs.pop("add_offset", None) + data.attrs.pop("_Unsigned", None) + data.attrs.pop("ancillary_variables", None) # Can't currently load DQF # although we could compute these, we'd have to update in calibration - data.attrs.pop('valid_range', None) + data.attrs.pop("valid_range", None) # add in information from the filename that may be useful to the user - for attr in ('observation_type', 'scene_abbr', 'scan_mode', 'platform_shortname', 'suffix'): + for attr in ("observation_type", "scene_abbr", "scan_mode", "platform_shortname", "suffix"): if attr in self.filename_info: data.attrs[attr] = self.filename_info[attr] # copy global attributes to metadata - for attr in ('scene_id', 'orbital_slot', 'instrument_ID', 'production_site', 'timeline_ID'): + for attr in ("scene_id", "orbital_slot", "instrument_ID", "production_site", "timeline_ID"): data.attrs[attr] = self.nc.attrs.get(attr) # only include these if they are present - for attr in ('fusion_args',): + for attr in ("fusion_args",): if attr in self.nc.attrs: data.attrs[attr] = self.nc.attrs[attr] @@ -128,23 +128,23 @@ def _raw_calibrate(self, data): """ res = data res.attrs = data.attrs - res.attrs['units'] = '1' - res.attrs['long_name'] = 'Raw Counts' - res.attrs['standard_name'] = 'counts' + res.attrs["units"] = "1" + res.attrs["long_name"] = "Raw Counts" + res.attrs["standard_name"] = "counts" return res def _vis_calibrate(self, data): """Calibrate visible channels to reflectance.""" - solar_irradiance = self['esun'] + solar_irradiance = self["esun"] esd = self["earth_sun_distance_anomaly_in_AU"].astype(float) factor = np.pi * esd * esd / solar_irradiance res = data * factor res.attrs = data.attrs - res.attrs['units'] = '1' - res.attrs['long_name'] = 'Bidirectional Reflectance' - res.attrs['standard_name'] = 'toa_bidirectional_reflectance' + res.attrs["units"] = "1" + res.attrs["long_name"] = "Bidirectional Reflectance" + res.attrs["standard_name"] = "toa_bidirectional_reflectance" return res def _get_minimum_radiance(self, data): @@ -170,7 +170,7 @@ def _ir_calibrate(self, data): res = (fk2 / np.log(fk1 / data + 1) - bc1) / bc2 res.attrs = data.attrs - res.attrs['units'] = 'K' - res.attrs['long_name'] = 'Brightness Temperature' - res.attrs['standard_name'] = 'toa_brightness_temperature' + res.attrs["units"] = "K" + res.attrs["long_name"] = "Brightness Temperature" + res.attrs["standard_name"] = "toa_brightness_temperature" return res diff --git a/satpy/readers/abi_l2_nc.py b/satpy/readers/abi_l2_nc.py index d63ba354a6..2324d3e1fd 100644 --- a/satpy/readers/abi_l2_nc.py +++ b/satpy/readers/abi_l2_nc.py @@ -33,49 +33,49 @@ class NC_ABI_L2(NC_ABI_BASE): def get_dataset(self, key, info): """Load a 
dataset.""" - var = info['file_key'] - if self.filetype_info['file_type'] == 'abi_l2_mcmip': + var = info["file_key"] + if self.filetype_info["file_type"] == "abi_l2_mcmip": var += "_" + key["name"] - LOG.debug('Reading in get_dataset %s.', var) + LOG.debug("Reading in get_dataset %s.", var) variable = self[var] variable.attrs.update(key.to_dict()) self._update_data_arr_with_filename_attrs(variable) self._remove_problem_attrs(variable) # convert to satpy standard units - if variable.attrs['units'] == '1' and key['calibration'] == 'reflectance': + if variable.attrs["units"] == "1" and key["calibration"] == "reflectance": variable *= 100.0 - variable.attrs['units'] = '%' + variable.attrs["units"] = "%" return variable def _update_data_arr_with_filename_attrs(self, variable): - _units = variable.attrs['units'] if 'units' in variable.attrs else None + _units = variable.attrs["units"] if "units" in variable.attrs else None variable.attrs.update({ - 'platform_name': self.platform_name, - 'sensor': self.sensor, - 'units': _units, - 'orbital_parameters': { - 'satellite_nominal_latitude': float(self.nc['nominal_satellite_subpoint_lat']), - 'satellite_nominal_longitude': float(self.nc['nominal_satellite_subpoint_lon']), - 'satellite_nominal_altitude': float(self.nc['nominal_satellite_height']) * 1000., + "platform_name": self.platform_name, + "sensor": self.sensor, + "units": _units, + "orbital_parameters": { + "satellite_nominal_latitude": float(self.nc["nominal_satellite_subpoint_lat"]), + "satellite_nominal_longitude": float(self.nc["nominal_satellite_subpoint_lon"]), + "satellite_nominal_altitude": float(self.nc["nominal_satellite_height"]) * 1000., }, }) - if 'flag_meanings' in variable.attrs: - variable.attrs['flag_meanings'] = variable.attrs['flag_meanings'].split(' ') + if "flag_meanings" in variable.attrs: + variable.attrs["flag_meanings"] = variable.attrs["flag_meanings"].split(" ") # add in information from the filename that may be useful to the user - for attr in ('scene_abbr', 'scan_mode', 'platform_shortname'): + for attr in ("scene_abbr", "scan_mode", "platform_shortname"): variable.attrs[attr] = self.filename_info.get(attr) # add in information hardcoded in the filetype YAML - for attr in ('observation_type',): + for attr in ("observation_type",): if attr in self.filetype_info: variable.attrs[attr] = self.filetype_info[attr] # copy global attributes to metadata - for attr in ('scene_id', 'orbital_slot', 'instrument_ID', 'production_site', 'timeline_ID'): + for attr in ("scene_id", "orbital_slot", "instrument_ID", "production_site", "timeline_ID"): variable.attrs[attr] = self.nc.attrs.get(attr) @staticmethod @@ -83,13 +83,13 @@ def _remove_problem_attrs(variable): # remove attributes that could be confusing later if not np.issubdtype(variable.dtype, np.integer): # integer fields keep the _FillValue - variable.attrs.pop('_FillValue', None) - variable.attrs.pop('scale_factor', None) - variable.attrs.pop('add_offset', None) - variable.attrs.pop('valid_range', None) - variable.attrs.pop('_Unsigned', None) - variable.attrs.pop('valid_range', None) - variable.attrs.pop('ancillary_variables', None) # Can't currently load DQF + variable.attrs.pop("_FillValue", None) + variable.attrs.pop("scale_factor", None) + variable.attrs.pop("add_offset", None) + variable.attrs.pop("valid_range", None) + variable.attrs.pop("_Unsigned", None) + variable.attrs.pop("valid_range", None) + variable.attrs.pop("ancillary_variables", None) # Can't currently load DQF def available_datasets(self, 
configured_datasets=None): """Add resolution to configured datasets.""" @@ -98,12 +98,12 @@ def available_datasets(self, configured_datasets=None): # don't override what they've done if is_avail is not None: yield is_avail, ds_info - matches = self.file_type_matches(ds_info['file_type']) + matches = self.file_type_matches(ds_info["file_type"]) if matches: # we have this dataset resolution = self.spatial_resolution_to_number() new_info = ds_info.copy() - new_info.setdefault('resolution', resolution) + new_info.setdefault("resolution", resolution) yield True, ds_info elif is_avail is None: # we don't know what to do with this diff --git a/satpy/readers/acspo.py b/satpy/readers/acspo.py index 14c8038b63..8a8262af33 100644 --- a/satpy/readers/acspo.py +++ b/satpy/readers/acspo.py @@ -33,9 +33,9 @@ ROWS_PER_SCAN = { - 'modis': 10, - 'viirs': 16, - 'avhrr': None, + "modis": 10, + "viirs": 16, + "avhrr": None, } @@ -45,7 +45,7 @@ class ACSPOFileHandler(NetCDF4FileHandler): @property def platform_name(self): """Get satellite name for this file's data.""" - res = self['/attr/platform'] + res = self["/attr/platform"] if isinstance(res, np.ndarray): return str(res.astype(str)) return res @@ -53,7 +53,7 @@ def platform_name(self): @property def sensor_name(self): """Get instrument name for this file's data.""" - res = self['/attr/sensor'] + res = self["/attr/sensor"] if isinstance(res, np.ndarray): res = str(res.astype(str)) return res.lower() @@ -69,12 +69,12 @@ def get_shape(self, ds_id, ds_info): tuple: (rows, cols) """ - var_path = ds_info.get('file_key', '{}'.format(ds_id['name'])) - if var_path + '/shape' not in self: + var_path = ds_info.get("file_key", "{}".format(ds_id["name"])) + if var_path + "/shape" not in self: # loading a scalar value shape = 1 else: - shape = self[var_path + '/shape'] + shape = self[var_path + "/shape"] if len(shape) == 3: if shape[0] != 1: raise ValueError("Not sure how to load 3D Dataset with more than 1 time") @@ -88,49 +88,49 @@ def _parse_datetime(datestr): @property def start_time(self): """Get first observation time of data.""" - return self._parse_datetime(self['/attr/time_coverage_start']) + return self._parse_datetime(self["/attr/time_coverage_start"]) @property def end_time(self): """Get final observation time of data.""" - return self._parse_datetime(self['/attr/time_coverage_end']) + return self._parse_datetime(self["/attr/time_coverage_end"]) def get_metadata(self, dataset_id, ds_info): """Collect various metadata about the specified dataset.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) shape = self.get_shape(dataset_id, ds_info) - units = self[var_path + '/attr/units'] - info = getattr(self[var_path], 'attrs', {}) - standard_name = self[var_path + '/attr/standard_name'] - resolution = float(self['/attr/spatial_resolution'].split(' ')[0]) + units = self[var_path + "/attr/units"] + info = getattr(self[var_path], "attrs", {}) + standard_name = self[var_path + "/attr/standard_name"] + resolution = float(self["/attr/spatial_resolution"].split(" ")[0]) rows_per_scan = ROWS_PER_SCAN.get(self.sensor_name) or 0 info.update(dataset_id.to_dict()) info.update({ - 'shape': shape, - 'units': units, - 'platform_name': self.platform_name, - 'sensor': self.sensor_name, - 'standard_name': standard_name, - 'resolution': resolution, - 'rows_per_scan': rows_per_scan, - 'long_name': self.get(var_path + '/attr/long_name'), - 'comment': self.get(var_path + '/attr/comment'), + 
"shape": shape, + "units": units, + "platform_name": self.platform_name, + "sensor": self.sensor_name, + "standard_name": standard_name, + "resolution": resolution, + "rows_per_scan": rows_per_scan, + "long_name": self.get(var_path + "/attr/long_name"), + "comment": self.get(var_path + "/attr/comment"), }) return info def get_dataset(self, dataset_id, ds_info): """Load data array and metadata from file on disk.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) metadata = self.get_metadata(dataset_id, ds_info) - shape = metadata['shape'] - file_shape = self[var_path + '/shape'] - metadata['shape'] = shape + shape = metadata["shape"] + file_shape = self[var_path + "/shape"] + metadata["shape"] = shape - valid_min = self[var_path + '/attr/valid_min'] - valid_max = self[var_path + '/attr/valid_max'] + valid_min = self[var_path + "/attr/valid_min"] + valid_max = self[var_path + "/attr/valid_max"] # no need to check fill value since we are using valid min/max - scale_factor = self.get(var_path + '/attr/scale_factor') - add_offset = self.get(var_path + '/attr/add_offset') + scale_factor = self.get(var_path + "/attr/scale_factor") + add_offset = self.get(var_path + "/attr/add_offset") data = self[var_path] data = data.rename({"ni": "x", "nj": "y"}) @@ -141,15 +141,15 @@ def get_dataset(self, dataset_id, ds_info): if scale_factor is not None: data = data * scale_factor + add_offset - if ds_info.get('cloud_clear', False): + if ds_info.get("cloud_clear", False): # clear-sky if bit 15-16 are 00 - clear_sky_mask = (self['l2p_flags'][0] & 0b1100000000000000) != 0 + clear_sky_mask = (self["l2p_flags"][0] & 0b1100000000000000) != 0 clear_sky_mask = clear_sky_mask.rename({"ni": "x", "nj": "y"}) data = data.where(~clear_sky_mask) data.attrs.update(metadata) # Remove these attributes since they are no longer valid and can cause invalid value filling. 
- data.attrs.pop('_FillValue', None) - data.attrs.pop('valid_max', None) - data.attrs.pop('valid_min', None) + data.attrs.pop("_FillValue", None) + data.attrs.pop("valid_max", None) + data.attrs.pop("valid_min", None) return data diff --git a/satpy/readers/agri_l1.py b/satpy/readers/agri_l1.py index 9612d016cd..381880cd5c 100644 --- a/satpy/readers/agri_l1.py +++ b/satpy/readers/agri_l1.py @@ -36,21 +36,21 @@ class HDF_AGRI_L1(FY4Base): def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(HDF_AGRI_L1, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'AGRI' + self.sensor = "AGRI" def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" - ds_name = dataset_id['name'] - logger.debug('Reading in get_dataset %s.', ds_name) - file_key = ds_info.get('file_key', ds_name) - if self.PLATFORM_ID == 'FY-4B': + ds_name = dataset_id["name"] + logger.debug("Reading in get_dataset %s.", ds_name) + file_key = ds_info.get("file_key", ds_name) + if self.PLATFORM_ID == "FY-4B": if self.CHANS_ID in file_key: - file_key = f'Data/{file_key}' + file_key = f"Data/{file_key}" elif self.SUN_ID in file_key or self.SAT_ID in file_key: - file_key = f'Navigation/{file_key}' + file_key = f"Navigation/{file_key}" data = self.get(file_key) if data.ndim >= 2: - data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) + data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) data = self.calibrate(data, ds_info, ds_name, file_key) self.adjust_attrs(data, ds_info) @@ -59,15 +59,15 @@ def get_dataset(self, dataset_id, ds_info): def adjust_attrs(self, data, ds_info): """Adjust the attrs of the data.""" - satname = self.PLATFORM_NAMES.get(self['/attr/Satellite Name'], self['/attr/Satellite Name']) - data.attrs.update({'platform_name': satname, - 'sensor': self['/attr/Sensor Identification Code'].lower(), - 'orbital_parameters': { - 'satellite_nominal_latitude': self['/attr/NOMCenterLat'].item(), - 'satellite_nominal_longitude': self['/attr/NOMCenterLon'].item(), - 'satellite_nominal_altitude': self['/attr/NOMSatHeight'].item()}}) + satname = self.PLATFORM_NAMES.get(self["/attr/Satellite Name"], self["/attr/Satellite Name"]) + data.attrs.update({"platform_name": satname, + "sensor": self["/attr/Sensor Identification Code"].lower(), + "orbital_parameters": { + "satellite_nominal_latitude": self["/attr/NOMCenterLat"].item(), + "satellite_nominal_longitude": self["/attr/NOMCenterLon"].item(), + "satellite_nominal_altitude": self["/attr/NOMSatHeight"].item()}}) data.attrs.update(ds_info) # remove attributes that could be confusing later - data.attrs.pop('FillValue', None) - data.attrs.pop('Intercept', None) - data.attrs.pop('Slope', None) + data.attrs.pop("FillValue", None) + data.attrs.pop("Intercept", None) + data.attrs.pop("Slope", None) diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py index dffafaa97b..8e14d049b9 100644 --- a/satpy/readers/ahi_hsd.py +++ b/satpy/readers/ahi_hsd.py @@ -84,7 +84,7 @@ "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "16") -logger = logging.getLogger('ahi_hsd') +logger = logging.getLogger("ahi_hsd") # Basic information block: _BASIC_INFO_TYPE = np.dtype([("hblock_number", "u1"), @@ -350,14 +350,14 @@ class AHIHSDFileHandler(BaseFileHandler): """ def __init__(self, filename, filename_info, filetype_info, - mask_space=True, calib_mode='update', + mask_space=True, calib_mode="update", user_calibration=None, round_actual_position=True): """Initialize the reader.""" super(AHIHSDFileHandler, 
self).__init__(filename, filename_info, filetype_info) self.is_zipped = False - self._unzipped = unzip_file(self.filename, prefix=str(filename_info['segment']).zfill(2)) + self._unzipped = unzip_file(self.filename, prefix=str(filename_info["segment"]).zfill(2)) # Assume file is not zipped if self._unzipped: # But if it is, set the filename to point to unzipped temp file @@ -365,14 +365,14 @@ def __init__(self, filename, filename_info, filetype_info, self.filename = self._unzipped self.channels = dict([(i, None) for i in AHI_CHANNEL_NAMES]) - self.units = dict([(i, 'counts') for i in AHI_CHANNEL_NAMES]) + self.units = dict([(i, "counts") for i in AHI_CHANNEL_NAMES]) self._data = dict([(i, None) for i in AHI_CHANNEL_NAMES]) self._header = dict([(i, None) for i in AHI_CHANNEL_NAMES]) self.lons = None self.lats = None - self.segment_number = filename_info['segment'] - self.total_segments = filename_info['total_segments'] + self.segment_number = filename_info["segment"] + self.total_segments = filename_info["total_segments"] with open(self.filename) as fd: self.basic_info = np.fromfile(fd, @@ -387,14 +387,14 @@ def __init__(self, filename, filename_info, filetype_info, self.nav_info = np.fromfile(fd, dtype=_NAV_INFO_TYPE, count=1)[0] - self.platform_name = np2str(self.basic_info['satellite']) - self.observation_area = np2str(self.basic_info['observation_area']) - self.sensor = 'ahi' + self.platform_name = np2str(self.basic_info["satellite"]) + self.observation_area = np2str(self.basic_info["observation_area"]) + self.sensor = "ahi" self.mask_space = mask_space - self.band_name = filetype_info['file_type'][4:].upper() - calib_mode_choices = ('NOMINAL', 'UPDATE') + self.band_name = filetype_info["file_type"][4:].upper() + calib_mode_choices = ("NOMINAL", "UPDATE") if calib_mode.upper() not in calib_mode_choices: - raise ValueError('Invalid calibration mode: {}. Choose one of {}'.format( + raise ValueError("Invalid calibration mode: {}. Choose one of {}".format( calib_mode, calib_mode_choices)) self.calib_mode = calib_mode.upper() @@ -419,12 +419,12 @@ def end_time(self): @property def observation_start_time(self): """Get the observation start time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info['observation_start_time'])) + return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_start_time"])) @property def observation_end_time(self): """Get the observation end time.""" - return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info['observation_end_time'])) + return datetime(1858, 11, 17) + timedelta(days=float(self.basic_info["observation_end_time"])) @property def nominal_start_time(self): @@ -456,7 +456,7 @@ def _modify_observation_time_for_nominal(self, observation_time): 2.5 minutes apart, then the result should be 13:32:30. 
""" - timeline = "{:04d}".format(self.basic_info['observation_timeline'][0]) + timeline = "{:04d}".format(self.basic_info["observation_timeline"][0]) if not self._is_valid_timeline(timeline): warnings.warn( "Observation timeline is fill value, not rounding observation time.", @@ -464,10 +464,10 @@ def _modify_observation_time_for_nominal(self, observation_time): ) return observation_time - if self.observation_area == 'FLDK': + if self.observation_area == "FLDK": dt = 0 else: - observation_frequency_seconds = {'JP': 150, 'R3': 150, 'R4': 30, 'R5': 30}[self.observation_area[:2]] + observation_frequency_seconds = {"JP": 150, "R3": 150, "R4": 30, "R5": 30}[self.observation_area[:2]] dt = observation_frequency_seconds * (int(self.observation_area[2:]) - 1) return observation_time.replace( @@ -490,25 +490,25 @@ def get_area_def(self, dsid): def _get_area_def(self): pdict = {} - pdict['cfac'] = np.uint32(self.proj_info['CFAC']) - pdict['lfac'] = np.uint32(self.proj_info['LFAC']) - pdict['coff'] = np.float32(self.proj_info['COFF']) - pdict['loff'] = -np.float32(self.proj_info['LOFF']) + 1 - pdict['a'] = float(self.proj_info['earth_equatorial_radius'] * 1000) - pdict['h'] = float(self.proj_info['distance_from_earth_center'] * 1000 - pdict['a']) - pdict['b'] = float(self.proj_info['earth_polar_radius'] * 1000) - pdict['ssp_lon'] = float(self.proj_info['sub_lon']) - pdict['nlines'] = int(self.data_info['number_of_lines']) - pdict['ncols'] = int(self.data_info['number_of_columns']) - pdict['scandir'] = 'N2S' - - pdict['loff'] = pdict['loff'] + (self.segment_number * pdict['nlines']) + pdict["cfac"] = np.uint32(self.proj_info["CFAC"]) + pdict["lfac"] = np.uint32(self.proj_info["LFAC"]) + pdict["coff"] = np.float32(self.proj_info["COFF"]) + pdict["loff"] = -np.float32(self.proj_info["LOFF"]) + 1 + pdict["a"] = float(self.proj_info["earth_equatorial_radius"] * 1000) + pdict["h"] = float(self.proj_info["distance_from_earth_center"] * 1000 - pdict["a"]) + pdict["b"] = float(self.proj_info["earth_polar_radius"] * 1000) + pdict["ssp_lon"] = float(self.proj_info["sub_lon"]) + pdict["nlines"] = int(self.data_info["number_of_lines"]) + pdict["ncols"] = int(self.data_info["number_of_columns"]) + pdict["scandir"] = "N2S" + + pdict["loff"] = pdict["loff"] + (self.segment_number * pdict["nlines"]) aex = get_area_extent(pdict) - pdict['a_name'] = self.observation_area - pdict['a_desc'] = "AHI {} area".format(self.observation_area) - pdict['p_id'] = f'geosh{self.basic_info["satellite"][0].decode()[-1]}' + pdict["a_name"] = self.observation_area + pdict["a_desc"] = "AHI {} area".format(self.observation_area) + pdict["p_id"] = f'geosh{self.basic_info["satellite"][0].decode()[-1]}' return get_area_definition(pdict, aex) @@ -526,99 +526,99 @@ def _read_header(self, fp_): header = {} fpos = 0 - header['block1'] = np.fromfile( + header["block1"] = np.fromfile( fp_, dtype=_BASIC_INFO_TYPE, count=1) - fpos = fpos + int(header['block1']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block1') + fpos = fpos + int(header["block1"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block1") fp_.seek(fpos, 0) header["block2"] = np.fromfile(fp_, dtype=_DATA_INFO_TYPE, count=1) - fpos = fpos + int(header['block2']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block2') + fpos = fpos + int(header["block2"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block2") fp_.seek(fpos, 0) header["block3"] = np.fromfile(fp_, dtype=_PROJ_INFO_TYPE, count=1) - fpos = fpos + int(header['block3']['blocklength']) - self._check_fpos(fp_, 
fpos, 0, 'block3') + fpos = fpos + int(header["block3"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block3") fp_.seek(fpos, 0) header["block4"] = np.fromfile(fp_, dtype=_NAV_INFO_TYPE, count=1) - fpos = fpos + int(header['block4']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block4') + fpos = fpos + int(header["block4"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block4") fp_.seek(fpos, 0) header["block5"] = np.fromfile(fp_, dtype=_CAL_INFO_TYPE, count=1) logger.debug("Band number = " + - str(header["block5"]['band_number'][0])) - logger.debug('Time_interval: %s - %s', + str(header["block5"]["band_number"][0])) + logger.debug("Time_interval: %s - %s", str(self.start_time), str(self.end_time)) - band_number = header["block5"]['band_number'][0] + band_number = header["block5"]["band_number"][0] if band_number < 7: cal = np.fromfile(fp_, dtype=_VISCAL_INFO_TYPE, count=1) else: cal = np.fromfile(fp_, dtype=_IRCAL_INFO_TYPE, count=1) - fpos = fpos + int(header['block5']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block5') + fpos = fpos + int(header["block5"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block5") fp_.seek(fpos, 0) - header['calibration'] = cal + header["calibration"] = cal header["block6"] = np.fromfile( fp_, dtype=_INTER_CALIBRATION_INFO_TYPE, count=1) - fpos = fpos + int(header['block6']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block6') + fpos = fpos + int(header["block6"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block6") fp_.seek(fpos, 0) header["block7"] = np.fromfile( fp_, dtype=_SEGMENT_INFO_TYPE, count=1) - fpos = fpos + int(header['block7']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block7') + fpos = fpos + int(header["block7"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block7") fp_.seek(fpos, 0) header["block8"] = np.fromfile( fp_, dtype=_NAVIGATION_CORRECTION_INFO_TYPE, count=1) # 8 The navigation corrections: - ncorrs = header["block8"]['numof_correction_info_data'][0] + ncorrs = header["block8"]["numof_correction_info_data"][0] corrections = [] for _i in range(ncorrs): corrections.append(np.fromfile(fp_, dtype=_NAVIGATION_CORRECTION_SUBINFO_TYPE, count=1)) - fpos = fpos + int(header['block8']['blocklength']) - self._check_fpos(fp_, fpos, 40, 'block8') + fpos = fpos + int(header["block8"]["blocklength"]) + self._check_fpos(fp_, fpos, 40, "block8") fp_.seek(fpos, 0) - header['navigation_corrections'] = corrections + header["navigation_corrections"] = corrections header["block9"] = np.fromfile(fp_, dtype=_OBSERVATION_TIME_INFO_TYPE, count=1) - numobstimes = header["block9"]['number_of_observation_times'][0] + numobstimes = header["block9"]["number_of_observation_times"][0] lines_and_times = [] for _i in range(numobstimes): lines_and_times.append(np.fromfile(fp_, dtype=_OBSERVATION_LINE_TIME_INFO_TYPE, count=1)) - header['observation_time_information'] = lines_and_times - fpos = fpos + int(header['block9']['blocklength']) - self._check_fpos(fp_, fpos, 40, 'block9') + header["observation_time_information"] = lines_and_times + fpos = fpos + int(header["block9"]["blocklength"]) + self._check_fpos(fp_, fpos, 40, "block9") fp_.seek(fpos, 0) header["block10"] = np.fromfile(fp_, dtype=_ERROR_INFO_TYPE, count=1) num_err_info_data = header["block10"][ - 'number_of_error_info_data'][0] + "number_of_error_info_data"][0] err_info_data = [] for _i in range(num_err_info_data): err_info_data.append(np.fromfile(fp_, dtype=_ERROR_LINE_INFO_TYPE, count=1)) - header['error_information_data'] = err_info_data - fpos = fpos + 
int(header['block10']['blocklength']) - self._check_fpos(fp_, fpos, 40, 'block10') + header["error_information_data"] = err_info_data + fpos = fpos + int(header["block10"]["blocklength"]) + self._check_fpos(fp_, fpos, 40, "block10") fp_.seek(fpos, 0) header["block11"] = np.fromfile(fp_, dtype=_SPARE_TYPE, count=1) - fpos = fpos + int(header['block11']['blocklength']) - self._check_fpos(fp_, fpos, 0, 'block11') + fpos = fpos + int(header["block11"]["blocklength"]) + self._check_fpos(fp_, fpos, 0, "block11") fp_.seek(fpos, 0) return header def _read_data(self, fp_, header, resolution): """Read data block.""" - nlines = int(header["block2"]['number_of_lines'][0]) - ncols = int(header["block2"]['number_of_columns'][0]) + nlines = int(header["block2"]["number_of_lines"][0]) + ncols = int(header["block2"]["number_of_columns"][0]) chunks = normalize_low_res_chunks( ("auto", "auto"), (nlines, ncols), @@ -628,13 +628,13 @@ def _read_data(self, fp_, header, resolution): np.float32, ) return da.from_array(np.memmap(self.filename, offset=fp_.tell(), - dtype=' no temperature data = da.where(data == 0, np.float32(np.nan), data) - cwl = self._header['block5']["central_wave_length"][0] * 1e-6 - c__ = self._header['calibration']["speed_of_light"][0] - h__ = self._header['calibration']["planck_constant"][0] - k__ = self._header['calibration']["boltzmann_constant"][0] + cwl = self._header["block5"]["central_wave_length"][0] * 1e-6 + c__ = self._header["calibration"]["speed_of_light"][0] + h__ = self._header["calibration"]["planck_constant"][0] + k__ = self._header["calibration"]["boltzmann_constant"][0] a__ = (h__ * c__) / (k__ * cwl) b__ = ((2 * h__ * c__ ** 2) / (data * 1.0e6 * cwl ** 5)) + 1 Te_ = a__ / da.log(b__) - c0_ = self._header['calibration']["c0_rad2tb_conversion"][0] - c1_ = self._header['calibration']["c1_rad2tb_conversion"][0] - c2_ = self._header['calibration']["c2_rad2tb_conversion"][0] + c0_ = self._header["calibration"]["c0_rad2tb_conversion"][0] + c1_ = self._header["calibration"]["c1_rad2tb_conversion"][0] + c2_ = self._header["calibration"]["c2_rad2tb_conversion"][0] return (c0_ + c1_ * Te_ + c2_ * Te_ ** 2).clip(0) diff --git a/satpy/readers/ahi_l1b_gridded_bin.py b/satpy/readers/ahi_l1b_gridded_bin.py index 0270015950..33289aee11 100644 --- a/satpy/readers/ahi_l1b_gridded_bin.py +++ b/satpy/readers/ahi_l1b_gridded_bin.py @@ -47,32 +47,32 @@ CHUNK_SIZE = get_legacy_chunk_size() # Hardcoded address of the reflectance and BT look-up tables -AHI_REMOTE_LUTS = 'http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/count2tbb_v102.tgz' +AHI_REMOTE_LUTS = "http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/count2tbb_v102.tgz" # Full disk image sizes for each spatial resolution -AHI_FULLDISK_SIZES = {0.005: {'x_size': 24000, - 'y_size': 24000}, - 0.01: {'x_size': 12000, - 'y_size': 12000}, - 0.02: {'x_size': 6000, - 'y_size': 6000}} +AHI_FULLDISK_SIZES = {0.005: {"x_size": 24000, + "y_size": 24000}, + 0.01: {"x_size": 12000, + "y_size": 12000}, + 0.02: {"x_size": 6000, + "y_size": 6000}} # Geographic extent of the full disk area in degrees AHI_FULLDISK_EXTENT = [85., -60., 205., 60.] 
# Resolutions of each channel type -AHI_CHANNEL_RES = {'vis': 0.01, - 'ext': 0.005, - 'sir': 0.02, - 'tir': 0.02} +AHI_CHANNEL_RES = {"vis": 0.01, + "ext": 0.005, + "sir": 0.02, + "tir": 0.02} # List of LUT filenames -AHI_LUT_NAMES = ['ext.01', 'vis.01', 'vis.02', 'vis.03', - 'sir.01', 'sir.02', 'tir.01', 'tir.02', - 'tir.03', 'tir.04', 'tir.05', 'tir.06', - 'tir.07', 'tir.08', 'tir.09', 'tir.10'] +AHI_LUT_NAMES = ["ext.01", "vis.01", "vis.02", "vis.03", + "sir.01", "sir.02", "tir.01", "tir.02", + "tir.03", "tir.04", "tir.05", "tir.06", + "tir.07", "tir.08", "tir.09", "tir.10"] -logger = logging.getLogger('ahi_grid') +logger = logging.getLogger("ahi_grid") class AHIGriddedFileHandler(BaseFileHandler): @@ -99,19 +99,19 @@ def __init__(self, filename, filename_info, filetype_info): # But if it is, set the filename to point to unzipped temp file self.filename = self._unzipped # Get the band name, needed for finding area and dimensions - self.product_name = filetype_info['file_type'] - self.areaname = filename_info['area'] - self.sensor = 'ahi' + self.product_name = filetype_info["file_type"] + self.areaname = filename_info["area"] + self.sensor = "ahi" self.res = AHI_CHANNEL_RES[self.product_name[:3]] - if self.areaname == 'fld': - self.nlines = AHI_FULLDISK_SIZES[self.res]['y_size'] - self.ncols = AHI_FULLDISK_SIZES[self.res]['x_size'] + if self.areaname == "fld": + self.nlines = AHI_FULLDISK_SIZES[self.res]["y_size"] + self.ncols = AHI_FULLDISK_SIZES[self.res]["x_size"] else: raise NotImplementedError("Only full disk data is supported.") # Set up directory path for the LUTs - app_dirs = AppDirs('ahi_gridded_luts', 'satpy', '1.0.2') - self.lut_dir = os.path.expanduser(app_dirs.user_data_dir) + '/' + app_dirs = AppDirs("ahi_gridded_luts", "satpy", "1.0.2") + self.lut_dir = os.path.expanduser(app_dirs.user_data_dir) + "/" self.area = None def __del__(self): @@ -149,7 +149,7 @@ def _download_luts(file_name): # Set up an connection and download with urllib.request.urlopen(AHI_REMOTE_LUTS) as response: # nosec - with open(file_name, 'wb') as out_file: + with open(file_name, "wb") as out_file: shutil.copyfileobj(response, out_file) @staticmethod @@ -174,14 +174,14 @@ def _get_luts(self): logger.info("Download AHI LUTs files and store in directory %s", self.lut_dir) tempdir = config["tmp_dir"] - fname = os.path.join(tempdir, 'tmp.tgz') + fname = os.path.join(tempdir, "tmp.tgz") # Download the LUTs self._download_luts(fname) # The file is tarred, untar and remove the downloaded file self._untar_luts(fname, tempdir) - lut_dl_dir = os.path.join(tempdir, 'count2tbb_v102/') + lut_dl_dir = os.path.join(tempdir, "count2tbb_v102/") # Loop over the LUTs and copy to the correct location for lutfile in AHI_LUT_NAMES: @@ -198,16 +198,16 @@ def get_area_def(self, dsid): This is fixed, but not defined in the file. So we must generate it ourselves with some assumptions. 
""" - if self.areaname == 'fld': + if self.areaname == "fld": area_extent = AHI_FULLDISK_EXTENT else: raise NotImplementedError("Reader only supports full disk data.") - proj_param = 'EPSG:4326' + proj_param = "EPSG:4326" - area = geometry.AreaDefinition('gridded_himawari', - 'A gridded Himawari area', - 'longlat', + area = geometry.AreaDefinition("gridded_himawari", + "A gridded Himawari area", + "longlat", proj_param, self.ncols, self.nlines, @@ -220,9 +220,9 @@ def _read_data(self, fp_): """Read raw binary data from file.""" return da.from_array(np.memmap(self.filename, offset=fp_.tell(), - dtype='>u2', + dtype=">u2", shape=(self.nlines, self.ncols), - mode='r'), + mode="r"), chunks=CHUNK_SIZE) def read_band(self, key, info): @@ -231,26 +231,26 @@ def read_band(self, key, info): res = self._read_data(fp_) # Calibrate - res = self.calibrate(res, key['calibration']) + res = self.calibrate(res, key["calibration"]) # Update metadata new_info = dict( - units=info['units'], - standard_name=info['standard_name'], - wavelength=info['wavelength'], - resolution=info['resolution'], + units=info["units"], + standard_name=info["standard_name"], + wavelength=info["wavelength"], + resolution=info["resolution"], id=key, - name=key['name'], + name=key["name"], sensor=self.sensor, ) - res = xr.DataArray(res, attrs=new_info, dims=['y', 'x']) + res = xr.DataArray(res, attrs=new_info, dims=["y", "x"]) return res def calibrate(self, data, calib): """Calibrate the data.""" - if calib == 'counts': + if calib == "counts": return data - if calib == 'reflectance' or calib == 'brightness_temperature': + if calib == "reflectance" or calib == "brightness_temperature": return self._calibrate(data) raise NotImplementedError("ERROR: Unsupported calibration.", "Only counts, reflectance and ", diff --git a/satpy/readers/ahi_l2_nc.py b/satpy/readers/ahi_l2_nc.py index 5159931819..17823fed1e 100644 --- a/satpy/readers/ahi_l2_nc.py +++ b/satpy/readers/ahi_l2_nc.py @@ -54,7 +54,7 @@ logger = logging.getLogger(__name__) -EXPECTED_DATA_AREA = 'Full Disk' +EXPECTED_DATA_AREA = "Full Disk" class HIML2NCFileHandler(BaseFileHandler): @@ -69,39 +69,39 @@ def __init__(self, filename, filename_info, filetype_info): chunks={"xc": "auto", "yc": "auto"}) # Check that file is a full disk scene, we don't know the area for anything else - if self.nc.attrs['cdm_data_type'] != EXPECTED_DATA_AREA: - raise ValueError('File is not a full disk scene') - - self.sensor = self.nc.attrs['instrument_name'].lower() - self.nlines = self.nc.dims['Columns'] - self.ncols = self.nc.dims['Rows'] - self.platform_name = self.nc.attrs['satellite_name'] - self.platform_shortname = filename_info['platform'] + if self.nc.attrs["cdm_data_type"] != EXPECTED_DATA_AREA: + raise ValueError("File is not a full disk scene") + + self.sensor = self.nc.attrs["instrument_name"].lower() + self.nlines = self.nc.dims["Columns"] + self.ncols = self.nc.dims["Rows"] + self.platform_name = self.nc.attrs["satellite_name"] + self.platform_shortname = filename_info["platform"] self._meta = None @property def start_time(self): """Start timestamp of the dataset.""" - dt = self.nc.attrs['time_coverage_start'] - return datetime.strptime(dt, '%Y-%m-%dT%H:%M:%SZ') + dt = self.nc.attrs["time_coverage_start"] + return datetime.strptime(dt, "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """End timestamp of the dataset.""" - dt = self.nc.attrs['time_coverage_end'] - return datetime.strptime(dt, '%Y-%m-%dT%H:%M:%SZ') + dt = self.nc.attrs["time_coverage_end"] + return datetime.strptime(dt, 
"%Y-%m-%dT%H:%M:%SZ") def get_dataset(self, key, info): """Load a dataset.""" - var = info['file_key'] - logger.debug('Reading in get_dataset %s.', var) + var = info["file_key"] + logger.debug("Reading in get_dataset %s.", var) variable = self.nc[var] # Data has 'Latitude' and 'Longitude' coords, these must be replaced. - variable = variable.rename({'Rows': 'y', 'Columns': 'x'}) + variable = variable.rename({"Rows": "y", "Columns": "x"}) - variable = variable.drop('Latitude') - variable = variable.drop('Longitude') + variable = variable.drop("Latitude") + variable = variable.drop("Longitude") variable.attrs.update(key.to_dict()) return variable @@ -117,20 +117,20 @@ def get_area_def(self, dsid): return self.area def _get_area_def(self): - logger.info('The AHI L2 cloud products do not have the metadata required to produce an area definition.' - ' Assuming standard Himawari-8/9 full disk projection.') + logger.info("The AHI L2 cloud products do not have the metadata required to produce an area definition." + " Assuming standard Himawari-8/9 full disk projection.") # Basic check to ensure we're processing a full disk (2km) scene.n if self.nlines != 5500 or self.ncols != 5500: raise ValueError("Input L2 file is not a full disk Himawari scene. Only full disk data is supported.") - pdict = {'cfac': 20466275, 'lfac': 20466275, 'coff': 2750.5, 'loff': 2750.5, 'a': 6378137.0, 'h': 35785863.0, - 'b': 6356752.3, 'ssp_lon': 140.7, 'nlines': self.nlines, 'ncols': self.ncols, 'scandir': 'N2S'} + pdict = {"cfac": 20466275, "lfac": 20466275, "coff": 2750.5, "loff": 2750.5, "a": 6378137.0, "h": 35785863.0, + "b": 6356752.3, "ssp_lon": 140.7, "nlines": self.nlines, "ncols": self.ncols, "scandir": "N2S"} aex = get_area_extent(pdict) - pdict['a_name'] = 'Himawari_Area' - pdict['a_desc'] = "AHI Full Disk area" - pdict['p_id'] = f'geos{self.platform_shortname}' + pdict["a_name"] = "Himawari_Area" + pdict["a_desc"] = "AHI Full Disk area" + pdict["p_id"] = f"geos{self.platform_shortname}" return get_area_definition(pdict, aex) diff --git a/satpy/readers/ami_l1b.py b/satpy/readers/ami_l1b.py index 9adeaf76f1..db8c8444d8 100644 --- a/satpy/readers/ami_l1b.py +++ b/satpy/readers/ami_l1b.py @@ -36,8 +36,8 @@ CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_NAMES = { - 'GK-2A': 'GEO-KOMPSAT-2A', - 'GK-2B': 'GEO-KOMPSAT-2B', + "GK-2A": "GEO-KOMPSAT-2A", + "GK-2B": "GEO-KOMPSAT-2B", } @@ -90,7 +90,7 @@ class AMIL1bNetCDF(BaseFileHandler): """ def __init__(self, filename, filename_info, filetype_info, - calib_mode='PYSPECTRAL', allow_conditional_pixels=False, + calib_mode="PYSPECTRAL", allow_conditional_pixels=False, user_calibration=None): """Open the NetCDF file with xarray and prepare the Dataset for reading.""" super(AMIL1bNetCDF, self).__init__(filename, filename_info, filetype_info) @@ -98,17 +98,17 @@ def __init__(self, filename, filename_info, filetype_info, self.nc = xr.open_dataset(f_obj, decode_cf=True, mask_and_scale=False, - chunks={'dim_image_x': CHUNK_SIZE, 'dim_image_y': CHUNK_SIZE}) - self.nc = self.nc.rename({'dim_image_x': 'x', 'dim_image_y': 'y'}) + chunks={"dim_image_x": CHUNK_SIZE, "dim_image_y": CHUNK_SIZE}) + self.nc = self.nc.rename({"dim_image_x": "x", "dim_image_y": "y"}) - platform_shortname = self.nc.attrs['satellite_name'] + platform_shortname = self.nc.attrs["satellite_name"] self.platform_name = PLATFORM_NAMES.get(platform_shortname) - self.sensor = 'ami' - self.band_name = filetype_info['file_type'].upper() + self.sensor = "ami" + self.band_name = filetype_info["file_type"].upper() 
self.allow_conditional_pixels = allow_conditional_pixels - calib_mode_choices = ('FILE', 'PYSPECTRAL', 'GSICS') + calib_mode_choices = ("FILE", "PYSPECTRAL", "GSICS") if calib_mode.upper() not in calib_mode_choices: - raise ValueError('Invalid calibration mode: {}. Choose one of {}'.format( + raise ValueError("Invalid calibration mode: {}. Choose one of {}".format( calib_mode, calib_mode_choices)) self.calib_mode = calib_mode.upper() @@ -118,36 +118,36 @@ def __init__(self, filename, filename_info, filetype_info, def start_time(self): """Get observation start time.""" base = datetime(2000, 1, 1, 12, 0, 0) - return base + timedelta(seconds=self.nc.attrs['observation_start_time']) + return base + timedelta(seconds=self.nc.attrs["observation_start_time"]) @property def end_time(self): """Get observation end time.""" base = datetime(2000, 1, 1, 12, 0, 0) - return base + timedelta(seconds=self.nc.attrs['observation_end_time']) + return base + timedelta(seconds=self.nc.attrs["observation_end_time"]) def get_area_def(self, dsid): """Get area definition for this file.""" pdict = {} - pdict['a'] = self.nc.attrs['earth_equatorial_radius'] - pdict['b'] = self.nc.attrs['earth_polar_radius'] - pdict['h'] = self.nc.attrs['nominal_satellite_height'] - pdict['a'] - pdict['ssp_lon'] = self.nc.attrs['sub_longitude'] * 180 / np.pi # it's in radians? - pdict['ncols'] = self.nc.attrs['number_of_columns'] - pdict['nlines'] = self.nc.attrs['number_of_lines'] - obs_mode = self.nc.attrs['observation_mode'] - resolution = self.nc.attrs['channel_spatial_resolution'] + pdict["a"] = self.nc.attrs["earth_equatorial_radius"] + pdict["b"] = self.nc.attrs["earth_polar_radius"] + pdict["h"] = self.nc.attrs["nominal_satellite_height"] - pdict["a"] + pdict["ssp_lon"] = self.nc.attrs["sub_longitude"] * 180 / np.pi # it's in radians? + pdict["ncols"] = self.nc.attrs["number_of_columns"] + pdict["nlines"] = self.nc.attrs["number_of_lines"] + obs_mode = self.nc.attrs["observation_mode"] + resolution = self.nc.attrs["channel_spatial_resolution"] # Example offset: 11000.5 # the 'get_area_extent' will handle this half pixel for us - pdict['cfac'] = self.nc.attrs['cfac'] - pdict['coff'] = self.nc.attrs['coff'] - pdict['lfac'] = -self.nc.attrs['lfac'] - pdict['loff'] = self.nc.attrs['loff'] - pdict['scandir'] = 'N2S' - pdict['a_name'] = 'ami_geos_{}'.format(obs_mode.lower()) - pdict['a_desc'] = 'AMI {} Area at {} resolution'.format(obs_mode, resolution) - pdict['p_id'] = 'ami_fixed_grid' + pdict["cfac"] = self.nc.attrs["cfac"] + pdict["coff"] = self.nc.attrs["coff"] + pdict["lfac"] = -self.nc.attrs["lfac"] + pdict["loff"] = self.nc.attrs["loff"] + pdict["scandir"] = "N2S" + pdict["a_name"] = "ami_geos_{}".format(obs_mode.lower()) + pdict["a_desc"] = "AMI {} Area at {} resolution".format(obs_mode, resolution) + pdict["p_id"] = "ami_fixed_grid" area_extent = get_area_extent(pdict) fg_area_def = get_area_definition(pdict, area_extent) @@ -155,12 +155,12 @@ def get_area_def(self, dsid): def get_orbital_parameters(self): """Collect orbital parameters for this file.""" - a = float(self.nc.attrs['earth_equatorial_radius']) - b = float(self.nc.attrs['earth_polar_radius']) + a = float(self.nc.attrs["earth_equatorial_radius"]) + b = float(self.nc.attrs["earth_polar_radius"]) # nominal_satellite_height seems to be from the center of the earth - h = float(self.nc.attrs['nominal_satellite_height']) - a - lon_0 = self.nc.attrs['sub_longitude'] * 180 / np.pi # it's in radians? 
- sc_position = self.nc['sc_position'].attrs['sc_position_center_pixel'] + h = float(self.nc.attrs["nominal_satellite_height"]) - a + lon_0 = self.nc.attrs["sub_longitude"] * 180 / np.pi # it's in radians? + sc_position = self.nc["sc_position"].attrs["sc_position_center_pixel"] # convert ECEF coordinates to lon, lat, alt ecef = pyproj.CRS.from_dict({"proj": "geocent", "a": a, "b": b}) @@ -169,18 +169,18 @@ def get_orbital_parameters(self): sc_position = transformer.transform(sc_position[0], sc_position[1], sc_position[2]) orbital_parameters = { - 'projection_longitude': float(lon_0), - 'projection_latitude': 0.0, - 'projection_altitude': h, - 'satellite_actual_longitude': sc_position[0], - 'satellite_actual_latitude': sc_position[1], - 'satellite_actual_altitude': sc_position[2], # meters + "projection_longitude": float(lon_0), + "projection_latitude": 0.0, + "projection_altitude": h, + "satellite_actual_longitude": sc_position[0], + "satellite_actual_latitude": sc_position[1], + "satellite_actual_altitude": sc_position[2], # meters } return orbital_parameters def get_dataset(self, dataset_id, ds_info): """Load a dataset as a xarray DataArray.""" - file_key = ds_info.get('file_key', dataset_id['name']) + file_key = ds_info.get("file_key", dataset_id["name"]) data = self.nc[file_key] # hold on to attributes for later attrs = data.attrs @@ -195,47 +195,47 @@ def get_dataset(self, dataset_id, ds_info): qf = data & 0b1100000000000000 # mask DQF bits - bits = attrs['number_of_valid_bits_per_pixel'] + bits = attrs["number_of_valid_bits_per_pixel"] data &= 2**bits - 1 # only take "no error" pixels as valid data = data.where(qf == 0) # Calibration values from file, fall back to built-in if unavailable - gain = self.nc.attrs['DN_to_Radiance_Gain'] - offset = self.nc.attrs['DN_to_Radiance_Offset'] + gain = self.nc.attrs["DN_to_Radiance_Gain"] + offset = self.nc.attrs["DN_to_Radiance_Offset"] - if dataset_id['calibration'] in ('radiance', 'reflectance', 'brightness_temperature'): + if dataset_id["calibration"] in ("radiance", "reflectance", "brightness_temperature"): data = gain * data + offset - if self.calib_mode == 'GSICS': + if self.calib_mode == "GSICS": data = self._apply_gsics_rad_correction(data) elif isinstance(self.user_calibration, dict): data = self._apply_user_rad_correction(data) - if dataset_id['calibration'] == 'reflectance': + if dataset_id["calibration"] == "reflectance": # depends on the radiance calibration above - rad_to_alb = self.nc.attrs['Radiance_to_Albedo_c'] - if ds_info.get('units') == '%': + rad_to_alb = self.nc.attrs["Radiance_to_Albedo_c"] + if ds_info.get("units") == "%": rad_to_alb *= 100 data = data * rad_to_alb - elif dataset_id['calibration'] == 'brightness_temperature': + elif dataset_id["calibration"] == "brightness_temperature": data = self._calibrate_ir(dataset_id, data) - elif dataset_id['calibration'] not in ('counts', 'radiance'): - raise ValueError("Unknown calibration: '{}'".format(dataset_id['calibration'])) + elif dataset_id["calibration"] not in ("counts", "radiance"): + raise ValueError("Unknown calibration: '{}'".format(dataset_id["calibration"])) - for attr_name in ('standard_name', 'units'): + for attr_name in ("standard_name", "units"): attrs[attr_name] = ds_info[attr_name] attrs.update(dataset_id.to_dict()) - attrs['orbital_parameters'] = self.get_orbital_parameters() - attrs['platform_name'] = self.platform_name - attrs['sensor'] = self.sensor + attrs["orbital_parameters"] = self.get_orbital_parameters() + attrs["platform_name"] = 
self.platform_name + attrs["sensor"] = self.sensor data.attrs = attrs return data def _calibrate_ir(self, dataset_id, data): """Calibrate radiance data to BTs using either pyspectral or in-file coefficients.""" - if self.calib_mode == 'PYSPECTRAL': + if self.calib_mode == "PYSPECTRAL": # depends on the radiance calibration above # Convert um to m^-1 (SI units for pyspectral) - wn = 1 / (dataset_id['wavelength'][1] / 1e6) + wn = 1 / (dataset_id["wavelength"][1] / 1e6) # Convert cm^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. bt_data = rad2temp(wn, data.data * 1e-5) @@ -248,17 +248,17 @@ def _calibrate_ir(self, dataset_id, data): else: # IR coefficients from the file # Channel specific - c0 = self.nc.attrs['Teff_to_Tbb_c0'] - c1 = self.nc.attrs['Teff_to_Tbb_c1'] - c2 = self.nc.attrs['Teff_to_Tbb_c2'] + c0 = self.nc.attrs["Teff_to_Tbb_c0"] + c1 = self.nc.attrs["Teff_to_Tbb_c1"] + c2 = self.nc.attrs["Teff_to_Tbb_c2"] # These should be fixed, but load anyway - cval = self.nc.attrs['light_speed'] - kval = self.nc.attrs['Boltzmann_constant_k'] - hval = self.nc.attrs['Plank_constant_h'] + cval = self.nc.attrs["light_speed"] + kval = self.nc.attrs["Boltzmann_constant_k"] + hval = self.nc.attrs["Plank_constant_h"] # Compute wavenumber as cm-1 - wn = (10000 / dataset_id['wavelength'][1]) * 100 + wn = (10000 / dataset_id["wavelength"][1]) * 100 # Convert radiance to effective brightness temperature e1 = (2 * hval * cval * cval) * np.power(wn, 3) e2 = (data.data * 1e-5) @@ -271,8 +271,8 @@ def _calibrate_ir(self, dataset_id, data): def _apply_gsics_rad_correction(self, data): """Retrieve GSICS factors from L1 file and apply to radiance.""" - rad_slope = self.nc['gsics_coeff_slope'][0] - rad_offset = self.nc['gsics_coeff_intercept'][0] + rad_slope = self.nc["gsics_coeff_slope"][0] + rad_offset = self.nc["gsics_coeff_intercept"][0] data = apply_rad_correction(data, rad_slope, rad_offset) return data diff --git a/satpy/readers/amsr2_l1b.py b/satpy/readers/amsr2_l1b.py index bd3a35c05d..29778c5f0d 100644 --- a/satpy/readers/amsr2_l1b.py +++ b/satpy/readers/amsr2_l1b.py @@ -25,8 +25,8 @@ class AMSR2L1BFileHandler(HDF5FileHandler): def get_metadata(self, ds_id, ds_info): """Get the metadata.""" - var_path = ds_info['file_key'] - info = getattr(self[var_path], 'attrs', {}) + var_path = ds_info["file_key"] + info = getattr(self[var_path], "attrs", {}) info.update(ds_info) info.update({ "shape": self.get_shape(ds_id, ds_info), @@ -41,23 +41,23 @@ def get_metadata(self, ds_id, ds_info): def get_shape(self, ds_id, ds_info): """Get output shape of specified dataset.""" - var_path = ds_info['file_key'] - shape = self[var_path + '/shape'] - if ((ds_info.get('standard_name') == "longitude" or ds_info.get('standard_name') == "latitude") and - ds_id['resolution'] == 10000): + var_path = ds_info["file_key"] + shape = self[var_path + "/shape"] + if ((ds_info.get("standard_name") == "longitude" or ds_info.get("standard_name") == "latitude") and + ds_id["resolution"] == 10000): return shape[0], int(shape[1] / 2) return shape def get_dataset(self, ds_id, ds_info): """Get output data and metadata of specified dataset.""" - var_path = ds_info['file_key'] - fill_value = ds_info.get('fill_value', 65535) + var_path = ds_info["file_key"] + fill_value = ds_info.get("fill_value", 65535) metadata = self.get_metadata(ds_id, ds_info) data = self[var_path] - if ((ds_info.get('standard_name') == "longitude" or - ds_info.get('standard_name') == "latitude") and - ds_id['resolution'] == 
10000): + if ((ds_info.get("standard_name") == "longitude" or + ds_info.get("standard_name") == "latitude") and + ds_id["resolution"] == 10000): # FIXME: Lower frequency channels need CoRegistration parameters applied data = data[:, ::2] * self[var_path + "/attr/SCALE FACTOR"] else: diff --git a/satpy/readers/amsr2_l2.py b/satpy/readers/amsr2_l2.py index f241861c22..0797ad5bbd 100644 --- a/satpy/readers/amsr2_l2.py +++ b/satpy/readers/amsr2_l2.py @@ -25,7 +25,7 @@ class AMSR2L2FileHandler(AMSR2L1BFileHandler): def mask_dataset(self, ds_info, data): """Mask data with the fill value.""" - fill_value = ds_info.get('fill_value', 65535) + fill_value = ds_info.get("fill_value", 65535) return data.where(data != fill_value) def scale_dataset(self, var_path, data): @@ -34,14 +34,14 @@ def scale_dataset(self, var_path, data): def get_dataset(self, ds_id, ds_info): """Get output data and metadata of specified dataset.""" - var_path = ds_info['file_key'] + var_path = ds_info["file_key"] data = self[var_path].squeeze() data = self.mask_dataset(ds_info, data) data = self.scale_dataset(var_path, data) - if ds_info.get('name') == "ssw": - data = data.rename({'dim_0': 'y', 'dim_1': 'x'}) + if ds_info.get("name") == "ssw": + data = data.rename({"dim_0": "y", "dim_1": "x"}) metadata = self.get_metadata(ds_id, ds_info) data.attrs.update(metadata) return data diff --git a/satpy/readers/amsr2_l2_gaasp.py b/satpy/readers/amsr2_l2_gaasp.py index 5f91e2d965..54a3769747 100644 --- a/satpy/readers/amsr2_l2_gaasp.py +++ b/satpy/readers/amsr2_l2_gaasp.py @@ -58,19 +58,19 @@ class GAASPFileHandler(BaseFileHandler): """Generic file handler for GAASP output files.""" y_dims: Tuple[str, ...] = ( - 'Number_of_Scans', + "Number_of_Scans", ) x_dims: Tuple[str, ...] = ( - 'Number_of_hi_rez_FOVs', - 'Number_of_low_rez_FOVs', + "Number_of_hi_rez_FOVs", + "Number_of_low_rez_FOVs", ) time_dims = ( - 'Time_Dimension', + "Time_Dimension", ) is_gridded = False dim_resolutions = { - 'Number_of_hi_rez_FOVs': 5000, - 'Number_of_low_rez_FOVs': 10000, + "Number_of_hi_rez_FOVs": 5000, + "Number_of_low_rez_FOVs": 10000, } @cached_property @@ -84,39 +84,39 @@ def nc(self): chunks=chunks) if len(self.time_dims) == 1: - nc = nc.rename({self.time_dims[0]: 'time'}) + nc = nc.rename({self.time_dims[0]: "time"}) return nc @property def start_time(self): """Get start time of observation.""" try: - return self.filename_info['start_time'] + return self.filename_info["start_time"] except KeyError: - time_str = self.nc.attrs['time_coverage_start'] + time_str = self.nc.attrs["time_coverage_start"] return datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get end time of observation.""" try: - return self.filename_info['end_time'] + return self.filename_info["end_time"] except KeyError: - time_str = self.nc.attrs['time_coverage_end'] + time_str = self.nc.attrs["time_coverage_end"] return datetime.strptime(time_str, "%Y-%m-%dT%H:%M:%S.%fZ") @property def sensor_names(self): """Sensors who have data in this file.""" - return {self.nc.attrs['instrument_name'].lower()} + return {self.nc.attrs["instrument_name"].lower()} @property def platform_name(self): """Name of the platform whose data is stored in this file.""" - return self.nc.attrs['platform_name'] + return self.nc.attrs["platform_name"] def _get_var_name_without_suffix(self, var_name): - var_suffix = self.filetype_info.get('var_suffix', "") + var_suffix = self.filetype_info.get("var_suffix", "") if var_suffix: var_name = var_name[:-len(var_suffix)] return var_name 
@@ -124,8 +124,8 @@ def _get_var_name_without_suffix(self, var_name): def _scale_data(self, data_arr, attrs): # handle scaling # take special care for integer/category fields - scale_factor = attrs.pop('scale_factor', 1.) - add_offset = attrs.pop('add_offset', 0.) + scale_factor = attrs.pop("scale_factor", 1.) + add_offset = attrs.pop("add_offset", 0.) scaling_needed = not (scale_factor == 1 and add_offset == 0) if scaling_needed: data_arr = data_arr * scale_factor + add_offset @@ -138,19 +138,19 @@ def _nan_for_dtype(data_arr_dtype): if data_arr_dtype.type == np.float32: return np.float32(np.nan) if np.issubdtype(data_arr_dtype, np.timedelta64): - return np.timedelta64('NaT') + return np.timedelta64("NaT") if np.issubdtype(data_arr_dtype, np.datetime64): - return np.datetime64('NaT') + return np.datetime64("NaT") return np.nan def _fill_data(self, data_arr, attrs): - fill_value = attrs.pop('_FillValue', None) + fill_value = attrs.pop("_FillValue", None) is_int = np.issubdtype(data_arr.dtype, np.integer) - has_flag_comment = 'comment' in attrs + has_flag_comment = "comment" in attrs if is_int and has_flag_comment: # category product fill_out = fill_value - attrs['_FillValue'] = fill_out + attrs["_FillValue"] = fill_out else: fill_out = self._nan_for_dtype(data_arr.dtype) if fill_value is not None: @@ -159,19 +159,19 @@ def _fill_data(self, data_arr, attrs): def get_dataset(self, dataid, ds_info): """Load, scale, and collect metadata for the specified DataID.""" - orig_var_name = self._get_var_name_without_suffix(dataid['name']) + orig_var_name = self._get_var_name_without_suffix(dataid["name"]) data_arr = self.nc[orig_var_name].copy() attrs = data_arr.attrs.copy() data_arr, attrs = self._scale_data(data_arr, attrs) data_arr, attrs = self._fill_data(data_arr, attrs) attrs.update({ - 'platform_name': self.platform_name, - 'sensor': sorted(self.sensor_names)[0], - 'start_time': self.start_time, - 'end_time': self.end_time, + "platform_name": self.platform_name, + "sensor": sorted(self.sensor_names)[0], + "start_time": self.start_time, + "end_time": self.end_time, }) - dim_map = dict(zip(data_arr.dims, ('y', 'x'))) + dim_map = dict(zip(data_arr.dims, ("y", "x"))) # rename dims data_arr = data_arr.rename(**dim_map) # drop coords, the base reader will recreate these @@ -187,27 +187,27 @@ def _available_if_this_file_type(self, configured_datasets): # file handler so let's yield early yield is_avail, ds_info continue - yield self.file_type_matches(ds_info['file_type']), ds_info + yield self.file_type_matches(ds_info["file_type"]), ds_info def _add_lonlat_coords(self, data_arr, ds_info): lat_coord = None lon_coord = None for coord_name in data_arr.coords: - if 'longitude' in coord_name.lower(): + if "longitude" in coord_name.lower(): lon_coord = coord_name - if 'latitude' in coord_name.lower(): + if "latitude" in coord_name.lower(): lat_coord = coord_name - ds_info['coordinates'] = [lon_coord, lat_coord] + ds_info["coordinates"] = [lon_coord, lat_coord] def _get_ds_info_for_data_arr(self, var_name, data_arr): - var_suffix = self.filetype_info.get('var_suffix', "") + var_suffix = self.filetype_info.get("var_suffix", "") ds_info = { - 'file_type': self.filetype_info['file_type'], - 'name': var_name + var_suffix, + "file_type": self.filetype_info["file_type"], + "name": var_name + var_suffix, } x_dim_name = data_arr.dims[1] if x_dim_name in self.dim_resolutions: - ds_info['resolution'] = self.dim_resolutions[x_dim_name] + ds_info["resolution"] = self.dim_resolutions[x_dim_name] if not self.is_gridded 
and data_arr.coords: self._add_lonlat_coords(data_arr, ds_info) return ds_info @@ -245,13 +245,13 @@ class GAASPGriddedFileHandler(GAASPFileHandler): """GAASP file handler for gridded products like SEAICE.""" y_dims = ( - 'Number_of_Y_Dimension', + "Number_of_Y_Dimension", ) x_dims = ( - 'Number_of_X_Dimension', + "Number_of_X_Dimension", ) dim_resolutions = { - 'Number_of_X_Dimension': 10000, + "Number_of_X_Dimension": 10000, } is_gridded = True @@ -266,12 +266,12 @@ def _get_extents(data_shape, res): def get_area_def(self, dataid): """Create area definition for equirectangular projected data.""" - var_suffix = self.filetype_info.get('var_suffix', '') - area_name = 'gaasp{}'.format(var_suffix) - orig_var_name = self._get_var_name_without_suffix(dataid['name']) + var_suffix = self.filetype_info.get("var_suffix", "") + area_name = "gaasp{}".format(var_suffix) + orig_var_name = self._get_var_name_without_suffix(dataid["name"]) data_shape = self.nc[orig_var_name].shape - crs = CRS(self.filetype_info['grid_epsg']) - res = dataid['resolution'] + crs = CRS(self.filetype_info["grid_epsg"]) + res = dataid["resolution"] extent = self._get_extents(data_shape, res) area_def = AreaDefinition( area_name, @@ -289,8 +289,8 @@ class GAASPLowResFileHandler(GAASPFileHandler): """GAASP file handler for files that only have low resolution products.""" x_dims = ( - 'Number_of_low_rez_FOVs', + "Number_of_low_rez_FOVs", ) dim_resolutions = { - 'Number_of_low_rez_FOVs': 10000, + "Number_of_low_rez_FOVs": 10000, } diff --git a/satpy/readers/ascat_l2_soilmoisture_bufr.py b/satpy/readers/ascat_l2_soilmoisture_bufr.py index c1a974807d..a5f77fd7eb 100644 --- a/satpy/readers/ascat_l2_soilmoisture_bufr.py +++ b/satpy/readers/ascat_l2_soilmoisture_bufr.py @@ -38,7 +38,7 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size -logger = logging.getLogger('AscatSoilMoistureBufr') +logger = logging.getLogger("AscatSoilMoistureBufr") CHUNK_SIZE = get_legacy_chunk_size() @@ -53,34 +53,34 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): start_time, end_time = self.get_start_end_date() self.metadata = {} - self.metadata['start_time'] = start_time - self.metadata['end_time'] = end_time + self.metadata["start_time"] = start_time + self.metadata["end_time"] = end_time @property def start_time(self): """Return the start time of data acqusition.""" - return self.metadata['start_time'] + return self.metadata["start_time"] @property def end_time(self): """Return the end time of data acquisition.""" - return self.metadata['end_time'] + return self.metadata["end_time"] @property def platform_name(self): """Return spacecraft name.""" - return self.filename_info['platform'] + return self.filename_info["platform"] def extract_msg_date_extremes(self, bufr, date_min=None, date_max=None): """Extract the minimum and maximum dates from a single bufr message.""" - ec.codes_set(bufr, 'unpack', 1) - size = ec.codes_get(bufr, 'numberOfSubsets') - years = np.resize(ec.codes_get_array(bufr, 'year'), size) - months = np.resize(ec.codes_get_array(bufr, 'month'), size) - days = np.resize(ec.codes_get_array(bufr, 'day'), size) - hours = np.resize(ec.codes_get_array(bufr, 'hour'), size) - minutes = np.resize(ec.codes_get_array(bufr, 'minute'), size) - seconds = np.resize(ec.codes_get_array(bufr, 'second'), size) + ec.codes_set(bufr, "unpack", 1) + size = ec.codes_get(bufr, "numberOfSubsets") + years = np.resize(ec.codes_get_array(bufr, "year"), size) + months = 
np.resize(ec.codes_get_array(bufr, "month"), size) + days = np.resize(ec.codes_get_array(bufr, "day"), size) + hours = np.resize(ec.codes_get_array(bufr, "hour"), size) + minutes = np.resize(ec.codes_get_array(bufr, "minute"), size) + seconds = np.resize(ec.codes_get_array(bufr, "second"), size) for year, month, day, hour, minute, second in zip(years, months, days, hours, minutes, seconds): time_stamp = datetime(year, month, day, hour, minute, second) date_min = time_stamp if not date_min else min(date_min, time_stamp) @@ -89,7 +89,7 @@ def extract_msg_date_extremes(self, bufr, date_min=None, date_max=None): def get_start_end_date(self): """Get the first and last date from the bufr file.""" - with open(self.filename, 'rb') as fh: + with open(self.filename, "rb") as fh: date_min = None date_max = None while True: @@ -103,16 +103,16 @@ def get_start_end_date(self): def get_bufr_data(self, key): """Get BUFR data by key.""" attr = np.array([]) - with open(self.filename, 'rb') as fh: + with open(self.filename, "rb") as fh: while True: # get handle for message bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break - ec.codes_set(bufr, 'unpack', 1) + ec.codes_set(bufr, "unpack", 1) tmp = ec.codes_get_array(bufr, key, float) if len(tmp) == 1: - size = ec.codes_get(bufr, 'numberOfSubsets') + size = ec.codes_get(bufr, "numberOfSubsets") tmp = np.resize(tmp, size) attr = np.append(attr, tmp) ec.codes_release(bufr) @@ -120,12 +120,12 @@ def get_bufr_data(self, key): def get_dataset(self, dataset_id, dataset_info): """Get dataset using the BUFR key in dataset_info.""" - arr = self.get_bufr_data(dataset_info['key']) - if 'fill_value' in dataset_info: - arr[arr == dataset_info['fill_value']] = np.nan + arr = self.get_bufr_data(dataset_info["key"]) + if "fill_value" in dataset_info: + arr[arr == dataset_info["fill_value"]] = np.nan arr = da.from_array(arr, chunks=CHUNK_SIZE) - xarr = xr.DataArray(arr, dims=["y"], name=dataset_info['name']) - xarr.attrs['platform_name'] = self.platform_name + xarr = xr.DataArray(arr, dims=["y"], name=dataset_info["name"]) + xarr.attrs["platform_name"] = self.platform_name xarr.attrs.update(dataset_info) return xarr diff --git a/satpy/readers/atms_l1b_nc.py b/satpy/readers/atms_l1b_nc.py index 1ea61fe92c..95d48b81cd 100644 --- a/satpy/readers/atms_l1b_nc.py +++ b/satpy/readers/atms_l1b_nc.py @@ -28,7 +28,7 @@ logger = logging.getLogger(__name__) -DATE_FMT = '%Y-%m-%dT%H:%M:%SZ' +DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" class AtmsL1bNCFileHandler(NetCDF4FileHandler): @@ -43,12 +43,12 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): @property def start_time(self): """Get observation start time.""" - return datetime.strptime(self['/attr/time_coverage_start'], DATE_FMT) + return datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) @property def end_time(self): """Get observation end time.""" - return datetime.strptime(self['/attr/time_coverage_end'], DATE_FMT) + return datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) @property def platform_name(self): @@ -113,8 +113,8 @@ def _select_dataset(self, name): def get_dataset(self, dataset_id, ds_info): """Get dataset.""" - name = dataset_id['name'] - logger.debug(f'Reading in file to get dataset with name {name}.') + name = dataset_id["name"] + logger.debug(f"Reading in file to get dataset with name {name}.") dataset = self._select_dataset(name) dataset = self._merge_attributes(dataset, ds_info) dataset = self._drop_coords(dataset) diff --git a/satpy/readers/atms_sdr_hdf5.py 
b/satpy/readers/atms_sdr_hdf5.py index 26fd3d58e5..7f2d43bd71 100644 --- a/satpy/readers/atms_sdr_hdf5.py +++ b/satpy/readers/atms_sdr_hdf5.py @@ -46,8 +46,8 @@ LOG = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() -ATMS_CHANNEL_NAMES = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', - '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22'] +ATMS_CHANNEL_NAMES = ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", + "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22"] class ATMS_SDR_FileHandler(JPSS_SDR_FileHandler): @@ -55,18 +55,18 @@ class ATMS_SDR_FileHandler(JPSS_SDR_FileHandler): def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialize file handler.""" - self.datasets = os.path.basename(filename).split('_')[0].split('-') + self.datasets = os.path.basename(filename).split("_")[0].split("-") super().__init__(filename, filename_info, filetype_info, **kwargs) def __getitem__(self, key): """Get item for given key.""" val = self.file_content[key] if isinstance(val, h5py.Dataset): - dset = h5py.File(self.filename, 'r')[key] + dset = h5py.File(self.filename, "r")[key] if dset.ndim == 3: dset_data = da.from_array(dset, chunks=CHUNK_SIZE) attrs = self._attrs_cache.get(key, dset.attrs) - return xr.DataArray(dset_data, dims=['y', 'x', 'z'], attrs=attrs) + return xr.DataArray(dset_data, dims=["y", "x", "z"], attrs=attrs) return super().__getitem__(key) @@ -78,11 +78,11 @@ def _get_atms_channel_index(self, ch_name): return None def _get_scans_per_granule(self, dataset_group): - number_of_granules_path = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules' + number_of_granules_path = "Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules" nb_granules_path = number_of_granules_path.format(dataset_group=DATASET_KEYS[dataset_group]) scans = [] for granule in range(self[nb_granules_path]): - scans_path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans' + scans_path = "Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans" scans_path = scans_path.format(dataset_group=DATASET_KEYS[dataset_group], granule=granule) scans.append(self[scans_path]) return scans @@ -99,15 +99,15 @@ def get_dataset(self, dataset_id, ds_info): scans actually sensed of course. 
""" - dataset_group = [ds_group for ds_group in ds_info['dataset_groups'] if ds_group in self.datasets] + dataset_group = [ds_group for ds_group in ds_info["dataset_groups"] if ds_group in self.datasets] if not dataset_group: return dataset_group = dataset_group[0] - ds_info['dataset_group'] = dataset_group + ds_info["dataset_group"] = dataset_group var_path = self._generate_file_key(dataset_id, ds_info) - ch_index = self._get_atms_channel_index(ds_info['name']) + ch_index = self._get_atms_channel_index(ds_info["name"]) data = self.concatenate_dataset(dataset_group, var_path, channel_index=ch_index) data = self.mask_fill_values(data, ds_info) diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py index e520b29b30..c566175b8c 100644 --- a/satpy/readers/avhrr_l1b_gaclac.py +++ b/satpy/readers/avhrr_l1b_gaclac.py @@ -53,8 +53,8 @@ AVHRR3_CHANNEL_NAMES = {"1": 0, "2": 1, "3A": 2, "3B": 3, "4": 4, "5": 5} AVHRR2_CHANNEL_NAMES = {"1": 0, "2": 1, "3": 2, "4": 3, "5": 4} AVHRR_CHANNEL_NAMES = {"1": 0, "2": 1, "3": 2, "4": 3} -ANGLES = ('sensor_zenith_angle', 'sensor_azimuth_angle', 'solar_zenith_angle', - 'solar_azimuth_angle', 'sun_sensor_azimuth_difference_angle') +ANGLES = ("sensor_zenith_angle", "sensor_azimuth_angle", "solar_zenith_angle", + "solar_azimuth_angle", "sun_sensor_azimuth_difference_angle") class GACLACFile(BaseFileHandler): @@ -84,7 +84,7 @@ def __init__(self, filename, filename_info, filetype_info, self.strip_invalid_coords = strip_invalid_coords self.interpolate_coords = interpolate_coords self.reader_kwargs = reader_kwargs - self.creation_site = filename_info.get('creation_site') + self.creation_site = filename_info.get("creation_site") self.reader = None self.calib_channels = None self.counts = None @@ -92,34 +92,34 @@ def __init__(self, filename, filename_info, filetype_info, self.qual_flags = None self.first_valid_lat = None self.last_valid_lat = None - self._start_time = filename_info['start_time'] - self._end_time = datetime.combine(filename_info['start_time'].date(), - filename_info['end_time'].time()) + self._start_time = filename_info["start_time"] + self._end_time = datetime.combine(filename_info["start_time"].date(), + filename_info["end_time"].time()) if self._end_time < self._start_time: self._end_time += timedelta(days=1) - self.platform_id = filename_info['platform_id'] - if self.platform_id in ['NK', 'NL', 'NM', 'NN', 'NP', 'M1', 'M2', - 'M3']: - if filename_info.get('transfer_mode') == 'GHRR': + self.platform_id = filename_info["platform_id"] + if self.platform_id in ["NK", "NL", "NM", "NN", "NP", "M1", "M2", + "M3"]: + if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACKLMReader else: self.reader_class = LACKLMReader self.chn_dict = AVHRR3_CHANNEL_NAMES - self.sensor = 'avhrr-3' - elif self.platform_id in ['NC', 'ND', 'NF', 'NH', 'NJ']: - if filename_info.get('transfer_mode') == 'GHRR': + self.sensor = "avhrr-3" + elif self.platform_id in ["NC", "ND", "NF", "NH", "NJ"]: + if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACPODReader else: self.reader_class = LACPODReader self.chn_dict = AVHRR2_CHANNEL_NAMES - self.sensor = 'avhrr-2' + self.sensor = "avhrr-2" else: - if filename_info.get('transfer_mode') == 'GHRR': + if filename_info.get("transfer_mode") == "GHRR": self.reader_class = GACPODReader else: self.reader_class = LACPODReader self.chn_dict = AVHRR_CHANNEL_NAMES - self.sensor = 'avhrr' + self.sensor = "avhrr" self.filename_info = filename_info def read_raw_data(self): @@ -131,43 
+131,43 @@ def read_raw_data(self): **self.reader_kwargs) self.reader.read(self.filename) if np.all(self.reader.mask): - raise ValueError('All data is masked out') + raise ValueError("All data is masked out") def get_dataset(self, key, info): """Get the dataset.""" self.read_raw_data() - if key['name'] in ['latitude', 'longitude']: + if key["name"] in ["latitude", "longitude"]: # Lats/lons are buffered by the reader - if key['name'] == 'latitude': + if key["name"] == "latitude": _, data = self.reader.get_lonlat() else: data, _ = self.reader.get_lonlat() # If coordinate interpolation is disabled, only every eighth # pixel has a lat/lon coordinate - xdim = 'x' if self.interpolate_coords else 'x_every_eighth' + xdim = "x" if self.interpolate_coords else "x_every_eighth" xcoords = None - elif key['name'] in ANGLES: + elif key["name"] in ANGLES: data = self._get_angle(key) - xdim = 'x' if self.interpolate_coords else 'x_every_eighth' + xdim = "x" if self.interpolate_coords else "x_every_eighth" xcoords = None - elif key['name'] == 'qual_flags': + elif key["name"] == "qual_flags": data = self.reader.get_qual_flags() - xdim = 'num_flags' - xcoords = ['Scan line number', - 'Fatal error flag', - 'Insufficient data for calibration', - 'Insufficient data for calibration', - 'Solar contamination of blackbody in channels 3', - 'Solar contamination of blackbody in channels 4', - 'Solar contamination of blackbody in channels 5'] - elif key['name'].upper() in self.chn_dict: + xdim = "num_flags" + xcoords = ["Scan line number", + "Fatal error flag", + "Insufficient data for calibration", + "Insufficient data for calibration", + "Solar contamination of blackbody in channels 3", + "Solar contamination of blackbody in channels 4", + "Solar contamination of blackbody in channels 5"] + elif key["name"].upper() in self.chn_dict: # Read and calibrate channel data data = self._get_channel(key) - xdim = 'x' + xdim = "x" xcoords = None else: - raise ValueError('Unknown dataset: {}'.format(key['name'])) + raise ValueError("Unknown dataset: {}".format(key["name"])) # Update start/end time using the actual scanline timestamps times = self.reader.get_times() @@ -183,7 +183,7 @@ def get_dataset(self, key, info): chunk_cols = data.shape[1] chunk_lines = int((CHUNK_SIZE ** 2) / chunk_cols) res = xr.DataArray(da.from_array(data, chunks=(chunk_lines, chunk_cols)), - dims=['y', xdim], attrs=info) + dims=["y", xdim], attrs=info) if xcoords: res[xdim] = xcoords @@ -191,8 +191,8 @@ def get_dataset(self, key, info): self._update_attrs(res) # Add scanline acquisition times - res['acq_time'] = ('y', times) - res['acq_time'].attrs['long_name'] = 'Mean scanline acquisition time' + res["acq_time"] = ("y", times) + res["acq_time"].attrs["long_name"] = "Mean scanline acquisition time" return res @@ -253,19 +253,19 @@ def _slice(self, data): def _get_channel(self, key): """Get channel and buffer results.""" - name = key['name'] - calibration = key['calibration'] - if calibration == 'counts': + name = key["name"] + calibration = key["calibration"] + if calibration == "counts": if self.counts is None: counts = self.reader.get_counts() self.counts = counts channels = self.counts - elif calibration in ['reflectance', 'brightness_temperature']: + elif calibration in ["reflectance", "brightness_temperature"]: if self.calib_channels is None: self.calib_channels = self.reader.get_calibrated_channels() channels = self.calib_channels else: - raise ValueError('Unknown calibration: {}'.format(calibration)) + raise ValueError("Unknown calibration: 
{}".format(calibration)) return channels[:, :, self.chn_dict[name.upper()]] def _get_qual_flags(self): @@ -278,12 +278,12 @@ def _get_angle(self, key): """Get angles and buffer results.""" if self.angles is None: sat_azi, sat_zenith, sun_azi, sun_zenith, rel_azi = self.reader.get_angles() - self.angles = {'sensor_zenith_angle': sat_zenith, - 'sensor_azimuth_angle': sat_azi, - 'solar_zenith_angle': sun_zenith, - 'solar_azimuth_angle': sun_azi, - 'sun_sensor_azimuth_difference_angle': rel_azi} - return self.angles[key['name']] + self.angles = {"sensor_zenith_angle": sat_zenith, + "sensor_azimuth_angle": sat_azi, + "solar_zenith_angle": sun_zenith, + "solar_azimuth_angle": sun_azi, + "sun_sensor_azimuth_difference_angle": rel_azi} + return self.angles[key["name"]] def _strip_invalid_lat(self): """Strip scanlines with invalid coordinates in the beginning/end of the orbit. @@ -302,11 +302,11 @@ def _update_attrs(self, res): """Update dataset attributes.""" for attr in self.reader.meta_data: res.attrs[attr] = self.reader.meta_data[attr] - res.attrs['platform_name'] = self.reader.spacecraft_name - res.attrs['orbit_number'] = self.filename_info.get('orbit_number', None) - res.attrs['sensor'] = self.sensor + res.attrs["platform_name"] = self.reader.spacecraft_name + res.attrs["orbit_number"] = self.filename_info.get("orbit_number", None) + res.attrs["sensor"] = self.sensor try: - res.attrs['orbital_parameters'] = {'tle': self.reader.get_tle_lines()} + res.attrs["orbital_parameters"] = {"tle": self.reader.get_tle_lines()} except (IndexError, RuntimeError): pass diff --git a/satpy/readers/caliop_l2_cloud.py b/satpy/readers/caliop_l2_cloud.py index 0fc89ae548..54dd100ffc 100644 --- a/satpy/readers/caliop_l2_cloud.py +++ b/satpy/readers/caliop_l2_cloud.py @@ -46,15 +46,15 @@ def __init__(self, filename, filename_info, filetype_info): self.get_filehandle() - self._start_time = filename_info['start_time'] + self._start_time = filename_info["start_time"] - logger.debug('Retrieving end time from metadata array') + logger.debug("Retrieving end time from metadata array") self.get_end_time() def get_end_time(self): """Get observation end time from file metadata.""" mda_dict = self.filehandle.attributes() - core_mda = mda_dict['coremetadata'] + core_mda = mda_dict["coremetadata"] end_time_str = self.parse_metadata_string(core_mda) self._end_time = datetime.strptime(end_time_str, "%Y-%m-%dT%H:%M:%SZ") @@ -76,19 +76,19 @@ def get_filehandle(self): def get_dataset(self, key, info): """Read data from file and return the corresponding projectables.""" - if key['name'] in ['longitude', 'latitude']: - logger.debug('Reading coordinate arrays.') + if key["name"] in ["longitude", "latitude"]: + logger.debug("Reading coordinate arrays.") if self.lons is None or self.lats is None: self.lons, self.lats = self.get_lonlats() - if key['name'] == 'latitude': + if key["name"] == "latitude": proj = Dataset(self.lats, id=key, **info) else: proj = Dataset(self.lons, id=key, **info) else: - data = self.get_sds_variable(key['name']) + data = self.get_sds_variable(key["name"]) proj = Dataset(data, id=key, **info) return proj @@ -101,8 +101,8 @@ def get_sds_variable(self, name): def get_lonlats(self): """Get longitude and latitude arrays from the file.""" - longitudes = self.get_sds_variable('Longitude') - latitudes = self.get_sds_variable('Latitude') + longitudes = self.get_sds_variable("Longitude") + latitudes = self.get_sds_variable("Latitude") return longitudes, latitudes @property diff --git a/satpy/readers/clavrx.py 
b/satpy/readers/clavrx.py index fd8cd552ae..4303456c04 100644 --- a/satpy/readers/clavrx.py +++ b/satpy/readers/clavrx.py @@ -36,37 +36,37 @@ CHUNK_SIZE = get_legacy_chunk_size() CF_UNITS = { - 'none': '1', + "none": "1", } SENSORS = { - 'MODIS': 'modis', - 'VIIRS': 'viirs', - 'AVHRR': 'avhrr', - 'AHI': 'ahi', - 'ABI': 'abi', - 'GOES-RU-IMAGER': 'abi', + "MODIS": "modis", + "VIIRS": "viirs", + "AVHRR": "avhrr", + "AHI": "ahi", + "ABI": "abi", + "GOES-RU-IMAGER": "abi", } PLATFORMS = { - 'SNPP': 'npp', - 'HIM8': 'himawari8', - 'HIM9': 'himawari9', - 'H08': 'himawari8', - 'H09': 'himawari9', - 'G16': 'GOES-16', - 'G17': 'GOES-17', - 'G18': 'GOES-18', + "SNPP": "npp", + "HIM8": "himawari8", + "HIM9": "himawari9", + "H08": "himawari8", + "H09": "himawari9", + "G16": "GOES-16", + "G17": "GOES-17", + "G18": "GOES-18", } ROWS_PER_SCAN = { - 'viirs': 16, - 'modis': 10, + "viirs": 16, + "modis": 10, } NADIR_RESOLUTION = { - 'viirs': 742, - 'modis': 1000, - 'avhrr': 1050, - 'ahi': 2000, - 'abi': 2004, + "viirs": 742, + "modis": 1000, + "avhrr": 1050, + "ahi": 2000, + "abi": 2004, } @@ -100,8 +100,8 @@ class _CLAVRxHelper: @staticmethod def _remove_attributes(attrs: dict) -> dict: """Remove attributes that described data before scaling.""" - old_attrs = ['unscaled_missing', 'SCALED_MIN', 'SCALED_MAX', - 'SCALED_MISSING'] + old_attrs = ["unscaled_missing", "SCALED_MIN", "SCALED_MAX", + "SCALED_MISSING"] for attr_key in old_attrs: attrs.pop(attr_key, None) @@ -118,15 +118,15 @@ def _scale_data(data_arr: Union[xr.DataArray, int], scale_factor: float, add_off @staticmethod def _get_data(data, dataset_id: dict) -> xr.DataArray: """Get a dataset.""" - if dataset_id.get('resolution'): - data.attrs['resolution'] = dataset_id['resolution'] + if dataset_id.get("resolution"): + data.attrs["resolution"] = dataset_id["resolution"] attrs = data.attrs.copy() - fill = attrs.get('_FillValue') - factor = attrs.pop('scale_factor', (np.ones(1, dtype=data.dtype))[0]) - offset = attrs.pop('add_offset', (np.zeros(1, dtype=data.dtype))[0]) - valid_range = attrs.get('valid_range', [None]) + fill = attrs.get("_FillValue") + factor = attrs.pop("scale_factor", (np.ones(1, dtype=data.dtype))[0]) + offset = attrs.pop("add_offset", (np.zeros(1, dtype=data.dtype))[0]) + valid_range = attrs.get("valid_range", [None]) if isinstance(valid_range, np.ndarray): attrs["valid_range"] = valid_range.tolist() @@ -135,7 +135,7 @@ def _get_data(data, dataset_id: dict) -> xr.DataArray: data = data.where(data != fill) data = _CLAVRxHelper._scale_data(data, factor, offset) # don't need _FillValue if it has been applied. 
- attrs.pop('_FillValue', None) + attrs.pop("_FillValue", None) if all(valid_range): valid_min = _CLAVRxHelper._scale_data(valid_range[0], factor, offset) @@ -144,7 +144,7 @@ def _get_data(data, dataset_id: dict) -> xr.DataArray: data = data.where((data >= valid_min) & (data <= valid_max), fill) else: data = data.where((data >= valid_min) & (data <= valid_max)) - attrs['valid_range'] = [valid_min, valid_max] + attrs["valid_range"] = [valid_min, valid_max] data.attrs = _CLAVRxHelper._remove_attributes(attrs) @@ -173,29 +173,29 @@ def _read_pug_fixed_grid(projection_coordinates: netCDF4.Variable, distance_mult lon_0 = projection_coordinates.longitude_of_projection_origin sweep_axis = projection_coordinates.sweep_angle_axis[0] - proj_dict = {'a': float(a) * distance_multiplier, - 'b': float(b) * distance_multiplier, - 'lon_0': float(lon_0), - 'h': float(h) * distance_multiplier, - 'proj': 'geos', - 'units': 'm', - 'sweep': sweep_axis} + proj_dict = {"a": float(a) * distance_multiplier, + "b": float(b) * distance_multiplier, + "lon_0": float(lon_0), + "h": float(h) * distance_multiplier, + "proj": "geos", + "units": "m", + "sweep": sweep_axis} return proj_dict @staticmethod def _find_input_nc(filename: str, l1b_base: str) -> str: dirname = os.path.dirname(filename) - l1b_filename = os.path.join(dirname, l1b_base + '.nc') + l1b_filename = os.path.join(dirname, l1b_base + ".nc") if os.path.exists(l1b_filename): return str(l1b_filename) - glob_pat = os.path.join(dirname, l1b_base + '*R20*.nc') + glob_pat = os.path.join(dirname, l1b_base + "*R20*.nc") LOG.debug("searching for {0}".format(glob_pat)) found_l1b_filenames = list(glob(glob_pat)) if len(found_l1b_filenames) == 0: raise IOError("Could not find navigation donor for {0}" " in same directory as CLAVR-x data".format(l1b_base)) - LOG.debug('Candidate nav donors: {0}'.format(repr(found_l1b_filenames))) + LOG.debug("Candidate nav donors: {0}".format(repr(found_l1b_filenames))) return found_l1b_filenames[0] @staticmethod @@ -231,14 +231,14 @@ def _read_axi_fixed_grid(filename: str, l1b_attr) -> geometry.AreaDefinition: if not proj: raise ValueError(f"Unable to recover projection information for {filename}") - h = float(proj['h']) - x, y = l1b['x'], l1b['y'] + h = float(proj["h"]) + x, y = l1b["x"], l1b["y"] area_extent, ncols, nlines = _CLAVRxHelper._area_extent(x, y, h) area = geometry.AreaDefinition( - 'ahi_geos', + "ahi_geos", "AHI L2 file area", - 'ahi_geos', + "ahi_geos", proj, ncols, nlines, @@ -253,24 +253,24 @@ def get_metadata(sensor: str, platform: str, attrs: dict, ds_info: dict) -> dict attr_info.update(attrs) attr_info.update(ds_info) - flag_meanings = attr_info.get('flag_meanings', None) - if not attr_info.get('SCALED', 1) and not flag_meanings: - attr_info['flag_meanings'] = '' - attr_info.setdefault('flag_values', [None]) - elif not attr_info.get('SCALED', 1) and isinstance(flag_meanings, str): + flag_meanings = attr_info.get("flag_meanings", None) + if not attr_info.get("SCALED", 1) and not flag_meanings: + attr_info["flag_meanings"] = "" + attr_info.setdefault("flag_values", [None]) + elif not attr_info.get("SCALED", 1) and isinstance(flag_meanings, str): attr_info["flag_meanings"] = flag_meanings.split(" ") - u = attr_info.get('units') + u = attr_info.get("units") if u in CF_UNITS: # CF compliance - attr_info['units'] = CF_UNITS[u] + attr_info["units"] = CF_UNITS[u] if u.lower() == "none": - attr_info['units'] = "1" - attr_info['sensor'] = sensor - attr_info['platform_name'] = platform + attr_info["units"] = "1" + 
attr_info["sensor"] = sensor + attr_info["platform_name"] = platform rps = _get_rows_per_scan(sensor) if rps: - attr_info['rows_per_scan'] = rps - attr_info['reader'] = 'clavrx' + attr_info["rows_per_scan"] = rps + attr_info["reader"] = "clavrx" return attr_info @@ -287,16 +287,16 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Get the start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get the end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) def get_dataset(self, dataset_id, ds_info): """Get a dataset.""" - var_name = ds_info.get('file_key', dataset_id['name']) + var_name = ds_info.get("file_key", dataset_id["name"]) data = self[var_name] data = _CLAVRxHelper._get_data(data, dataset_id) data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform, @@ -308,39 +308,39 @@ def get_nadir_resolution(self, sensor): for k, v in NADIR_RESOLUTION.items(): if sensor.startswith(k): return v - res = self.filename_info.get('resolution') - if res.endswith('m'): + res = self.filename_info.get("resolution") + if res.endswith("m"): return int(res[:-1]) elif res is not None: return int(res) def available_datasets(self, configured_datasets=None): """Automatically determine datasets provided by this file.""" - self.sensor = _get_sensor(self.file_content.get('/attr/sensor')) - self.platform = _get_platform(self.file_content.get('/attr/platform')) + self.sensor = _get_sensor(self.file_content.get("/attr/sensor")) + self.platform = _get_platform(self.file_content.get("/attr/platform")) nadir_resolution = self.get_nadir_resolution(self.sensor) - coordinates = ('longitude', 'latitude') + coordinates = ("longitude", "latitude") handled_variables = set() # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): - this_res = ds_info.get('resolution') - this_coords = ds_info.get('coordinates') + this_res = ds_info.get("resolution") + this_coords = ds_info.get("coordinates") # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info - var_name = ds_info.get('file_key', ds_info['name']) - matches = self.file_type_matches(ds_info['file_type']) + var_name = ds_info.get("file_key", ds_info["name"]) + matches = self.file_type_matches(ds_info["file_type"]) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self and this_res != nadir_resolution: handled_variables.add(var_name) new_info = ds_info.copy() # don't mess up the above yielded - new_info['resolution'] = nadir_resolution + new_info["resolution"] = nadir_resolution if self._is_polar() and this_coords is None: - new_info['coordinates'] = coordinates + new_info["coordinates"] = coordinates yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did @@ -351,31 +351,31 @@ def available_datasets(self, configured_datasets=None): for var_name, val in self.file_content.items(): if isinstance(val, SDS): ds_info = { - 'file_type': self.filetype_info['file_type'], - 'resolution': nadir_resolution, - 'name': var_name, + "file_type": self.filetype_info["file_type"], + "resolution": nadir_resolution, + "name": var_name, } if self._is_polar(): - ds_info['coordinates'] = ['longitude', 'latitude'] + ds_info["coordinates"] = ["longitude", "latitude"] yield True, ds_info def 
get_shape(self, dataset_id, ds_info): """Get the shape.""" - var_name = ds_info.get('file_key', dataset_id['name']) - return self[var_name + '/shape'] + var_name = ds_info.get("file_key", dataset_id["name"]) + return self[var_name + "/shape"] def _is_polar(self): - l1b_att, inst_att = (str(self.file_content.get('/attr/L1B', None)), - str(self.file_content.get('/attr/sensor', None))) + l1b_att, inst_att = (str(self.file_content.get("/attr/L1B", None)), + str(self.file_content.get("/attr/sensor", None))) - return (inst_att != 'AHI' and 'GOES' not in inst_att) or (l1b_att is None) + return (inst_att != "AHI" and "GOES" not in inst_att) or (l1b_att is None) def get_area_def(self, key): """Get the area definition of the data at hand.""" if self._is_polar(): # then it doesn't have a fixed grid return super(CLAVRXHDF4FileHandler, self).get_area_def(key) - l1b_att = str(self.file_content.get('/attr/L1B', None)) + l1b_att = str(self.file_content.get("/attr/L1B", None)) area_def = _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att) return area_def @@ -396,12 +396,12 @@ def __init__(self, filename, filename_info, filetype_info): decode_coords=True, chunks=CHUNK_SIZE) # y,x is used in satpy, bands rather than channel using in xrimage - self.nc = self.nc.rename_dims({'scan_lines_along_track_direction': "y", - 'pixel_elements_along_scan_direction': "x"}) + self.nc = self.nc.rename_dims({"scan_lines_along_track_direction": "y", + "pixel_elements_along_scan_direction": "x"}) self.platform = _get_platform( - self.filename_info.get('platform_shortname', None)) - self.sensor = _get_sensor(self.nc.attrs.get('sensor', None)) + self.filename_info.get("platform_shortname", None)) + self.sensor = _get_sensor(self.nc.attrs.get("sensor", None)) # coordinates need scaling and valid_range (mask_and_scale won't work on valid_range) self.nc.coords["latitude"] = _CLAVRxHelper._get_data(self.nc.coords["latitude"], {"name": "latitude"}) @@ -410,8 +410,8 @@ def __init__(self, filename, filename_info, filetype_info): def _get_ds_info_for_data_arr(self, var_name): ds_info = { - 'file_type': self.filetype_info['file_type'], - 'name': var_name, + "file_type": self.filetype_info["file_type"], + "name": var_name, } return ds_info @@ -451,28 +451,28 @@ def available_datasets(self, configured_datasets=None): # file handler so let's yield early yield is_avail, ds_info continue - if self.file_type_matches(ds_info['file_type']): - handled_vars.add(ds_info['name']) - yield self.file_type_matches(ds_info['file_type']), ds_info + if self.file_type_matches(ds_info["file_type"]): + handled_vars.add(ds_info["name"]) + yield self.file_type_matches(ds_info["file_type"]), ds_info yield from self._available_new_datasets(handled_vars) def _is_polar(self): - l1b_att, inst_att = (str(self.nc.attrs.get('L1B', None)), - str(self.nc.attrs.get('sensor', None))) + l1b_att, inst_att = (str(self.nc.attrs.get("L1B", None)), + str(self.nc.attrs.get("sensor", None))) - return (inst_att != 'AHI' and 'GOES' not in inst_att) or (l1b_att is None) + return (inst_att != "AHI" and "GOES" not in inst_att) or (l1b_att is None) def get_area_def(self, key): """Get the area definition of the data at hand.""" if self._is_polar(): # then it doesn't have a fixed grid return super(CLAVRXNetCDFFileHandler, self).get_area_def(key) - l1b_att = str(self.nc.attrs.get('L1B', None)) + l1b_att = str(self.nc.attrs.get("L1B", None)) return _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att) def get_dataset(self, dataset_id, ds_info): """Get a dataset.""" - var_name = 
ds_info.get('name', dataset_id['name']) + var_name = ds_info.get("name", dataset_id["name"]) data = self[var_name] data = _CLAVRxHelper._get_data(data, dataset_id) data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform, diff --git a/satpy/readers/cmsaf_claas2.py b/satpy/readers/cmsaf_claas2.py index f8f360623e..9bf3ca3deb 100644 --- a/satpy/readers/cmsaf_claas2.py +++ b/satpy/readers/cmsaf_claas2.py @@ -87,7 +87,7 @@ def _get_dsinfo(self, var): def get_dataset(self, dataset_id, info): """Get the dataset.""" - ds = self[dataset_id['name']] + ds = self[dataset_id["name"]] if "time" in ds.dims: return ds.squeeze(["time"]) diff --git a/satpy/readers/electrol_hrit.py b/satpy/readers/electrol_hrit.py index 53e69d42b4..c773850a73 100644 --- a/satpy/readers/electrol_hrit.py +++ b/satpy/readers/electrol_hrit.py @@ -40,34 +40,34 @@ time_cds_short, ) -logger = logging.getLogger('hrit_electrol') +logger = logging.getLogger("hrit_electrol") # goms implementation: -key_header = np.dtype([('key_number', 'u1'), - ('seed', '>f8')]) - -segment_identification = np.dtype([('GP_SC_ID', '>i2'), - ('spectral_channel_id', '>i1'), - ('segment_sequence_number', '>u2'), - ('planned_start_segment_number', '>u2'), - ('planned_end_segment_number', '>u2'), - ('data_field_representation', '>i1')]) - -image_segment_line_quality = np.dtype([('line_number_in_grid', '>i4'), - ('line_mean_acquisition', - [('days', '>u2'), - ('milliseconds', '>u4')]), - ('line_validity', 'u1'), - ('line_radiometric_quality', 'u1'), - ('line_geometric_quality', 'u1')]) +key_header = np.dtype([("key_number", "u1"), + ("seed", ">f8")]) + +segment_identification = np.dtype([("GP_SC_ID", ">i2"), + ("spectral_channel_id", ">i1"), + ("segment_sequence_number", ">u2"), + ("planned_start_segment_number", ">u2"), + ("planned_end_segment_number", ">u2"), + ("data_field_representation", ">i1")]) + +image_segment_line_quality = np.dtype([("line_number_in_grid", ">i4"), + ("line_mean_acquisition", + [("days", ">u2"), + ("milliseconds", ">u4")]), + ("line_validity", "u1"), + ("line_radiometric_quality", "u1"), + ("line_geometric_quality", "u1")]) goms_variable_length_headers = { - image_segment_line_quality: 'image_segment_line_quality'} + image_segment_line_quality: "image_segment_line_quality"} -goms_text_headers = {image_data_function: 'image_data_function', - annotation_header: 'annotation_header', - ancillary_text: 'ancillary_text'} +goms_text_headers = {image_data_function: "image_data_function", + annotation_header: "annotation_header", + ancillary_text: "ancillary_text"} goms_hdr_map = base_hdr_map.copy() goms_hdr_map.update({7: key_header, @@ -76,28 +76,28 @@ }) -orbit_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('X', '>f8', (8, )), - ('Y', '>f8', (8, )), - ('Z', '>f8', (8, )), - ('VX', '>f8', (8, )), - ('VY', '>f8', (8, )), - ('VZ', '>f8', (8, ))]) +orbit_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("X", ">f8", (8, )), + ("Y", ">f8", (8, )), + ("Z", ">f8", (8, )), + ("VX", ">f8", (8, )), + ("VY", ">f8", (8, )), + ("VZ", ">f8", (8, ))]) -attitude_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('XofSpinAxis', '>f8', (8, )), - ('YofSpinAxis', '>f8', (8, )), - ('ZofSpinAxis', '>f8', (8, ))]) +attitude_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("XofSpinAxis", ">f8", (8, )), + ("YofSpinAxis", ">f8", (8, )), + ("ZofSpinAxis", ">f8", (8, ))]) -cuc_time = np.dtype([('coarse', 'u1', (4, )), - ('fine', 
'u1', (3, ))]) +cuc_time = np.dtype([("coarse", "u1", (4, )), + ("fine", "u1", (3, ))]) -time_cds_expanded = np.dtype([('days', '>u2'), - ('milliseconds', '>u4'), - ('microseconds', '>u2'), - ('nanoseconds', '>u2')]) +time_cds_expanded = np.dtype([("days", ">u2"), + ("milliseconds", ">u4"), + ("microseconds", ">u2"), + ("nanoseconds", ">u2")]) satellite_status = np.dtype([("TagType", " 16777216: lut = lut.astype(np.float64) else: @@ -337,26 +337,26 @@ def _calibrate(self, data): def get_area_def(self, dsid): """Get the area definition of the band.""" pdict = {} - pdict['cfac'] = np.int32(self.mda['cfac']) - pdict['lfac'] = np.int32(self.mda['lfac']) - pdict['coff'] = np.float32(self.mda['coff']) - pdict['loff'] = np.float32(self.mda['loff']) + pdict["cfac"] = np.int32(self.mda["cfac"]) + pdict["lfac"] = np.int32(self.mda["lfac"]) + pdict["coff"] = np.float32(self.mda["coff"]) + pdict["loff"] = np.float32(self.mda["loff"]) - pdict['a'] = 6378169.00 - pdict['b'] = 6356583.80 - pdict['h'] = 35785831.00 - pdict['scandir'] = 'N2S' + pdict["a"] = 6378169.00 + pdict["b"] = 6356583.80 + pdict["h"] = 35785831.00 + pdict["scandir"] = "N2S" - pdict['ssp_lon'] = self.mda['projection_parameters']['SSP_longitude'] + pdict["ssp_lon"] = self.mda["projection_parameters"]["SSP_longitude"] - pdict['nlines'] = int(self.mda['number_of_lines']) - pdict['ncols'] = int(self.mda['number_of_columns']) + pdict["nlines"] = int(self.mda["number_of_lines"]) + pdict["ncols"] = int(self.mda["number_of_columns"]) - pdict['loff'] = pdict['nlines'] - pdict['loff'] + pdict["loff"] = pdict["nlines"] - pdict["loff"] - pdict['a_name'] = 'geosgoms' - pdict['a_desc'] = 'Electro-L/GOMS channel area' - pdict['p_id'] = 'goms' + pdict["a_name"] = "geosgoms" + pdict["a_desc"] = "Electro-L/GOMS channel area" + pdict["p_id"] = "goms" area_extent = get_area_extent(pdict) area = get_area_definition(pdict, area_extent) diff --git a/satpy/readers/epic_l1b_h5.py b/satpy/readers/epic_l1b_h5.py index d83bf6893d..3fb8f69c01 100644 --- a/satpy/readers/epic_l1b_h5.py +++ b/satpy/readers/epic_l1b_h5.py @@ -49,16 +49,16 @@ # Level 1b is given as counts. These factors convert to reflectance. 
# Retrieved from: https://asdc.larc.nasa.gov/documents/dscovr/DSCOVR_EPIC_Calibration_Factors_V03.pdf -CALIB_COEFS = {'B317': 1.216e-4, - 'B325': 1.111e-4, - 'B340': 1.975e-5, - 'B388': 2.685e-5, - 'B443': 8.34e-6, - 'B551': 6.66e-6, - 'B680': 9.3e-6, - 'B688': 2.02e-5, - 'B764': 2.36e-5, - 'B780': 1.435e-5} +CALIB_COEFS = {"B317": 1.216e-4, + "B325": 1.111e-4, + "B340": 1.975e-5, + "B388": 2.685e-5, + "B443": 8.34e-6, + "B551": 6.66e-6, + "B680": 9.3e-6, + "B688": 2.02e-5, + "B764": 2.36e-5, + "B780": 1.435e-5} class DscovrEpicL1BH5FileHandler(HDF5FileHandler): @@ -68,19 +68,19 @@ def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(DscovrEpicL1BH5FileHandler, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'epic' - self.platform_name = 'DSCOVR' + self.sensor = "epic" + self.platform_name = "DSCOVR" @property def start_time(self): """Get the start time.""" - start_time = datetime.strptime(self.file_content['/attr/begin_time'], '%Y-%m-%d %H:%M:%S') + start_time = datetime.strptime(self.file_content["/attr/begin_time"], "%Y-%m-%d %H:%M:%S") return start_time @property def end_time(self): """Get the end time.""" - end_time = datetime.strptime(self.file_content['/attr/end_time'], '%Y-%m-%d %H:%M:%S') + end_time = datetime.strptime(self.file_content["/attr/end_time"], "%Y-%m-%d %H:%M:%S") return end_time @staticmethod @@ -97,19 +97,19 @@ def calibrate(data, ds_name, calibration=None): def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" - ds_name = dataset_id['name'] + ds_name = dataset_id["name"] - logger.debug('Reading in get_dataset %s.', ds_name) - file_key = ds_info.get('file_key', ds_name) + logger.debug("Reading in get_dataset %s.", ds_name) + file_key = ds_info.get("file_key", ds_name) band = self._mask_infinite(self.get(file_key)) - band = self.calibrate(band, ds_name, calibration=dataset_id.get('calibration')) + band = self.calibrate(band, ds_name, calibration=dataset_id.get("calibration")) band = self._update_metadata(band) return band def _update_metadata(self, band): - band = band.rename({band.dims[0]: 'x', band.dims[1]: 'y'}) - band.attrs.update({'platform_name': self.platform_name, 'sensor': self.sensor}) + band = band.rename({band.dims[0]: "x", band.dims[1]: "y"}) + band.attrs.update({"platform_name": self.platform_name, "sensor": self.sensor}) return band diff --git a/satpy/readers/eps_l1b.py b/satpy/readers/eps_l1b.py index 1cc098a612..23e4ca712d 100644 --- a/satpy/readers/eps_l1b.py +++ b/satpy/readers/eps_l1b.py @@ -90,11 +90,11 @@ def read_records(filename): the_type = form.dtype((rec_class, sub_class)) # the_descr = grh_dtype.descr + the_type.descr except KeyError: - the_type = np.dtype([('unknown', 'V%d' % bare_size)]) + the_type = np.dtype([("unknown", "V%d" % bare_size)]) the_descr = grh_dtype.descr + the_type.descr the_type = np.dtype(the_descr) if the_type.itemsize < expected_size: - padding = [('unknown%d' % cnt, 'V%d' % (expected_size - the_type.itemsize))] + padding = [("unknown%d" % cnt, "V%d" % (expected_size - the_type.itemsize))] cnt += 1 the_descr += padding new_dtype = np.dtype(the_descr) @@ -112,14 +112,14 @@ def read_records(filename): offset = 0 for dtype, count, rec_class in zip(dtypes, counts, classes): fdes.seek(offset) - if rec_class == ('mdr', 2): - record = da.from_array(np.memmap(fdes, mode='r', dtype=dtype, shape=count, offset=offset), + if rec_class == ("mdr", 2): + record = da.from_array(np.memmap(fdes, mode="r", dtype=dtype, shape=count, offset=offset), 
chunks=(max_lines,)) else: record = np.fromfile(fdes, dtype=dtype, count=count) offset += dtype.itemsize * count if rec_class in sections: - logger.debug('Multiple records for ', str(rec_class)) + logger.debug("Multiple records for ", str(rec_class)) sections[rec_class] = np.hstack((sections[rec_class], record)) else: sections[rec_class] = record @@ -130,7 +130,7 @@ def read_records(filename): def create_xarray(arr): """Create xarray with correct dimensions.""" res = arr - res = xr.DataArray(res, dims=['y', 'x']) + res = xr.DataArray(res, dims=["y", "x"]) return res @@ -152,8 +152,8 @@ def __init__(self, filename, filename_info, filetype_info): filename, filename_info, filetype_info) self.area = None - self._start_time = filename_info['start_time'] - self._end_time = filename_info['end_time'] + self._start_time = filename_info["start_time"] + self._end_time = filename_info["end_time"] self.form = None self.scanlines = None self.pixels = None @@ -168,10 +168,10 @@ def __init__(self, filename, filename_info, filetype_info): def _read_all(self): logger.debug("Reading %s", self.filename) self.sections, self.form = read_records(self.filename) - self.scanlines = self['TOTAL_MDR'] - if self.scanlines != len(self.sections[('mdr', 2)]): + self.scanlines = self["TOTAL_MDR"] + if self.scanlines != len(self.sections[("mdr", 2)]): logger.warning("Number of declared records doesn't match number of scanlines in the file.") - self.scanlines = len(self.sections[('mdr', 2)]) + self.scanlines = len(self.sections[("mdr", 2)]) self.pixels = self["EARTH_VIEWS_PER_SCANLINE"] def __getitem__(self, key): @@ -287,24 +287,24 @@ def get_dataset(self, key, info): if self.sections is None: self._read_all() - if key['name'] in ['longitude', 'latitude']: + if key["name"] in ["longitude", "latitude"]: lons, lats = self.get_full_lonlats() - if key['name'] == 'longitude': + if key["name"] == "longitude": dataset = create_xarray(lons) else: dataset = create_xarray(lats) - elif key['name'] in ['solar_zenith_angle', 'solar_azimuth_angle', - 'satellite_zenith_angle', 'satellite_azimuth_angle']: + elif key["name"] in ["solar_zenith_angle", "solar_azimuth_angle", + "satellite_zenith_angle", "satellite_azimuth_angle"]: dataset = self._get_angle_dataarray(key) - elif key['name'] in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]: + elif key["name"] in ["1", "2", "3a", "3A", "3b", "3B", "4", "5"]: dataset = self._get_calibrated_dataarray(key) else: - logger.info("Can't load channel in eps_l1b: " + str(key['name'])) + logger.info("Can't load channel in eps_l1b: " + str(key["name"])) return - dataset.attrs['platform_name'] = self.platform_name - dataset.attrs['sensor'] = self.sensor_name + dataset.attrs["platform_name"] = self.platform_name + dataset.attrs["sensor"] = self.sensor_name if "calibration" in key: dataset.attrs["units"] = self.units[key["calibration"]] dataset.attrs.update(info) @@ -314,13 +314,13 @@ def get_dataset(self, key, info): def _get_angle_dataarray(self, key): """Get an angle dataarray.""" sun_azi, sun_zen, sat_azi, sat_zen = self.get_full_angles() - if key['name'] == 'solar_zenith_angle': + if key["name"] == "solar_zenith_angle": dataset = create_xarray(sun_zen) - elif key['name'] == 'solar_azimuth_angle': + elif key["name"] == "solar_azimuth_angle": dataset = create_xarray(sun_azi) - if key['name'] == 'satellite_zenith_angle': + if key["name"] == "satellite_zenith_angle": dataset = create_xarray(sat_zen) - elif key['name'] == 'satellite_azimuth_angle': + elif key["name"] == "satellite_azimuth_angle": dataset = 
create_xarray(sat_azi) return dataset @@ -336,26 +336,26 @@ def three_b_mask(self): def _get_calibrated_dataarray(self, key): """Get a calibrated dataarray.""" - if key['calibration'] not in ['reflectance', 'brightness_temperature', 'radiance']: - raise ValueError('calibration type ' + str(key['calibration']) + - ' is not supported!') + if key["calibration"] not in ["reflectance", "brightness_temperature", "radiance"]: + raise ValueError("calibration type " + str(key["calibration"]) + + " is not supported!") mask = None - channel_name = key['name'].upper() + channel_name = key["name"].upper() radiance_indices = {"1": 0, "2": 1, "3A": 2, "3B": 2, "4": 3, "5": 4} array = self["SCENE_RADIANCES"][:, radiance_indices[channel_name], :] if channel_name in ["1", "2", "3A"]: - if key['calibration'] == 'reflectance': + if key["calibration"] == "reflectance": array = radiance_to_refl(array, self[f"CH{channel_name}_SOLAR_FILTERED_IRRADIANCE"]) if channel_name == "3A": mask = self.three_a_mask[:, np.newaxis] if channel_name in ["3B", "4", "5"]: - if key['calibration'] == 'brightness_temperature': + if key["calibration"] == "brightness_temperature": array = radiance_to_bt(array, self[f"CH{channel_name}_CENTRAL_WAVENUMBER"], self[f"CH{channel_name}_CONSTANT1"], @@ -373,7 +373,7 @@ def get_lonlats(self): if self.area is None: lons, lats = self.get_full_lonlats() self.area = SwathDefinition(lons, lats) - self.area.name = '_'.join([self.platform_name, str(self.start_time), + self.area.name = "_".join([self.platform_name, str(self.start_time), str(self.end_time)]) return self.area diff --git a/satpy/readers/eum_base.py b/satpy/readers/eum_base.py index 76abcf035c..916ba9d444 100644 --- a/satpy/readers/eum_base.py +++ b/satpy/readers/eum_base.py @@ -22,10 +22,10 @@ import numpy as np # 6 bytes, 8 bytes, 10 bytes -time_cds_short = [('Days', '>u2'), ('Milliseconds', '>u4')] -time_cds = time_cds_short + [('Microseconds', '>u2')] -time_cds_expanded = time_cds + [('Nanoseconds', '>u2')] -issue_revision = [('Issue', np.uint16), ('Revision', np.uint16)] +time_cds_short = [("Days", ">u2"), ("Milliseconds", ">u4")] +time_cds = time_cds_short + [("Microseconds", ">u2")] +time_cds_expanded = time_cds + [("Nanoseconds", ">u2")] +issue_revision = [("Issue", np.uint16), ("Revision", np.uint16)] def timecds2datetime(tcds): @@ -33,14 +33,14 @@ def timecds2datetime(tcds): Works both with a dictionary and a numpy record_array. """ - days = int(tcds['Days']) - milliseconds = int(tcds['Milliseconds']) + days = int(tcds["Days"]) + milliseconds = int(tcds["Milliseconds"]) try: - microseconds = int(tcds['Microseconds']) + microseconds = int(tcds["Microseconds"]) except (KeyError, ValueError): microseconds = 0 try: - microseconds += int(tcds['Nanoseconds']) / 1000. + microseconds += int(tcds["Nanoseconds"]) / 1000. 
except (KeyError, ValueError): pass @@ -71,14 +71,14 @@ def recarray2dict(arr): else: if data.size == 1: data = data[0] - if ntype[:2] == '|S': + if ntype[:2] == "|S": # Python2 and Python3 handle strings differently try: data = data.decode() except ValueError: data = None else: - data = data.split(':')[0].strip() + data = data.split(":")[0].strip() res[key] = data else: res[key] = data.squeeze() @@ -88,15 +88,15 @@ def recarray2dict(arr): def get_service_mode(instrument_name, ssp_lon): """Get information about service mode for a given instrument and subsatellite longitude.""" - service_modes = {'seviri': {'0.0': {'service_name': 'fes', 'service_desc': 'Full Earth Scanning service'}, - '9.5': {'service_name': 'rss', 'service_desc': 'Rapid Scanning Service'}, - '41.5': {'service_name': 'iodc', 'service_desc': 'Indian Ocean Data Coverage service'}, - '45.5': {'service_name': 'iodc', 'service_desc': 'Indian Ocean Data Coverage service'} - }, - 'fci': {'0.0': {'service_name': 'fdss', 'service_desc': 'Full Disk Scanning Service'}, - '9.5': {'service_name': 'rss', 'service_desc': 'Rapid Scanning Service'}, + service_modes = {"seviri": {"0.0": {"service_name": "fes", "service_desc": "Full Earth Scanning service"}, + "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, + "41.5": {"service_name": "iodc", "service_desc": "Indian Ocean Data Coverage service"}, + "45.5": {"service_name": "iodc", "service_desc": "Indian Ocean Data Coverage service"} }, + "fci": {"0.0": {"service_name": "fdss", "service_desc": "Full Disk Scanning Service"}, + "9.5": {"service_name": "rss", "service_desc": "Rapid Scanning Service"}, + }, } - unknown_modes = {'service_name': 'unknown', 'service_desc': 'unknown'} + unknown_modes = {"service_name": "unknown", "service_desc": "unknown"} - return service_modes.get(instrument_name, unknown_modes).get('{:.1f}'.format(ssp_lon), unknown_modes) + return service_modes.get(instrument_name, unknown_modes).get("{:.1f}".format(ssp_lon), unknown_modes) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index 8e28219035..e42975b3a4 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -68,8 +68,8 @@ The reading routine supports channel data in counts, radiances, and (depending -on channel) brightness temperatures or reflectances. The brightness temperature and reflectance calculation is based on the formulas indicated in -`PUG`_. +on channel) brightness temperatures or reflectances. The brightness temperature and reflectance calculation is based on +the formulas indicated in `PUG`_. Radiance datasets are returned in units of radiance per unit wavenumber (mW m-2 sr-1 (cm-1)-1). Radiances can be converted to units of radiance per unit wavelength (W m-2 um-1 sr-1) by multiplying with the `radiance_unit_conversion_coefficient` dataset attribute. @@ -131,26 +131,26 @@ # dict containing all available auxiliary data parameters to be read using the index map. 
Keys are the # parameter name and values are the paths to the variable inside the netcdf AUX_DATA = { - 'subsatellite_latitude': 'state/platform/subsatellite_latitude', - 'subsatellite_longitude': 'state/platform/subsatellite_longitude', - 'platform_altitude': 'state/platform/platform_altitude', - 'subsolar_latitude': 'state/celestial/subsolar_latitude', - 'subsolar_longitude': 'state/celestial/subsolar_longitude', - 'earth_sun_distance': 'state/celestial/earth_sun_distance', - 'sun_satellite_distance': 'state/celestial/sun_satellite_distance', - 'time': 'time', - 'swath_number': 'data/swath_number', - 'swath_direction': 'data/swath_direction', + "subsatellite_latitude": "state/platform/subsatellite_latitude", + "subsatellite_longitude": "state/platform/subsatellite_longitude", + "platform_altitude": "state/platform/platform_altitude", + "subsolar_latitude": "state/celestial/subsolar_latitude", + "subsolar_longitude": "state/celestial/subsolar_longitude", + "earth_sun_distance": "state/celestial/earth_sun_distance", + "sun_satellite_distance": "state/celestial/sun_satellite_distance", + "time": "time", + "swath_number": "data/swath_number", + "swath_direction": "data/swath_direction", } -HIGH_RES_GRID_INFO = {'fci_l1c_hrfi': {'grid_type': '500m', - 'grid_width': 22272}, - 'fci_l1c_fdhsi': {'grid_type': '1km', - 'grid_width': 11136}} -LOW_RES_GRID_INFO = {'fci_l1c_hrfi': {'grid_type': '1km', - 'grid_width': 11136}, - 'fci_l1c_fdhsi': {'grid_type': '2km', - 'grid_width': 5568}} +HIGH_RES_GRID_INFO = {"fci_l1c_hrfi": {"grid_type": "500m", + "grid_width": 22272}, + "fci_l1c_fdhsi": {"grid_type": "1km", + "grid_width": 11136}} +LOW_RES_GRID_INFO = {"fci_l1c_hrfi": {"grid_type": "1km", + "grid_width": 11136}, + "fci_l1c_fdhsi": {"grid_type": "2km", + "grid_width": 5568}} def _get_aux_data_name_from_dsname(dsname): @@ -206,9 +206,9 @@ def __init__(self, filename, filename_info, filetype_info): filetype_info, cache_var_size=0, cache_handle=True) - logger.debug('Reading: {}'.format(self.filename)) - logger.debug('Start: {}'.format(self.start_time)) - logger.debug('End: {}'.format(self.end_time)) + logger.debug("Reading: {}".format(self.filename)) + logger.debug("Start: {}".format(self.start_time)) + logger.debug("End: {}".format(self.end_time)) self._cache = {} @@ -218,7 +218,7 @@ def rc_period_min(self): As RSS is not yet implemented, an error will be raised if RSS data are to be read """ - if not self.filename_info['coverage'] == 'FD': + if not self.filename_info["coverage"] == "FD": raise NotImplementedError(f"coverage for {self.filename_info['coverage']} not supported by this reader") return 2.5 return 10 @@ -227,7 +227,7 @@ def rc_period_min(self): def nominal_start_time(self): """Get nominal start time.""" rc_date = self.observation_start_time.replace(hour=0, minute=0, second=0, microsecond=0) - return rc_date + timedelta(minutes=(self.filename_info['repeat_cycle_in_day']-1)*self.rc_period_min) + return rc_date + timedelta(minutes=(self.filename_info["repeat_cycle_in_day"]-1)*self.rc_period_min) @property def nominal_end_time(self): @@ -237,12 +237,12 @@ def nominal_end_time(self): @property def observation_start_time(self): """Get observation start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def observation_end_time(self): """Get observation end time.""" - return self.filename_info['end_time'] + return self.filename_info["end_time"] @property def start_time(self): @@ -256,9 +256,9 @@ def end_time(self): def
get_channel_measured_group_path(self, channel): """Get the channel's measured group path.""" - if self.filetype_info['file_type'] == 'fci_l1c_hrfi': - channel += '_hr' - measured_group_path = 'data/{}/measured'.format(channel) + if self.filetype_info["file_type"] == "fci_l1c_hrfi": + channel += "_hr" + measured_group_path = "data/{}/measured".format(channel) return measured_group_path @@ -273,25 +273,25 @@ def get_segment_position_info(self): Note: in the FCI terminology, a segment is actually called "chunk". To avoid confusion with the dask concept of chunk, and to be consistent with SEVIRI, we opt to use the word segment. """ - vis_06_measured_path = self.get_channel_measured_group_path('vis_06') - ir_105_measured_path = self.get_channel_measured_group_path('ir_105') + vis_06_measured_path = self.get_channel_measured_group_path("vis_06") + ir_105_measured_path = self.get_channel_measured_group_path("ir_105") - file_type = self.filetype_info['file_type'] + file_type = self.filetype_info["file_type"] segment_position_info = { - HIGH_RES_GRID_INFO[file_type]['grid_type']: { - 'start_position_row': self.get_and_cache_npxr(vis_06_measured_path + '/start_position_row').item(), - 'end_position_row': self.get_and_cache_npxr(vis_06_measured_path + '/end_position_row').item(), - 'segment_height': self.get_and_cache_npxr(vis_06_measured_path + '/end_position_row').item() - - self.get_and_cache_npxr(vis_06_measured_path + '/start_position_row').item() + 1, - 'grid_width': HIGH_RES_GRID_INFO[file_type]['grid_width'] + HIGH_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item(), + "end_position_row": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(vis_06_measured_path + "/end_position_row").item() - + self.get_and_cache_npxr(vis_06_measured_path + "/start_position_row").item() + 1, + "grid_width": HIGH_RES_GRID_INFO[file_type]["grid_width"] }, - LOW_RES_GRID_INFO[file_type]['grid_type']: { - 'start_position_row': self.get_and_cache_npxr(ir_105_measured_path + '/start_position_row').item(), - 'end_position_row': self.get_and_cache_npxr(ir_105_measured_path + '/end_position_row').item(), - 'segment_height': self.get_and_cache_npxr(ir_105_measured_path + '/end_position_row').item() - - self.get_and_cache_npxr(ir_105_measured_path + '/start_position_row').item() + 1, - 'grid_width': LOW_RES_GRID_INFO[file_type]['grid_width'] + LOW_RES_GRID_INFO[file_type]["grid_type"]: { + "start_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item(), + "end_position_row": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item(), + "segment_height": self.get_and_cache_npxr(ir_105_measured_path + "/end_position_row").item() - + self.get_and_cache_npxr(ir_105_measured_path + "/start_position_row").item() + 1, + "grid_width": LOW_RES_GRID_INFO[file_type]["grid_width"] } } @@ -299,14 +299,14 @@ def get_segment_position_info(self): def get_dataset(self, key, info=None): """Load a dataset.""" - logger.debug('Reading {} from {}'.format(key['name'], self.filename)) - if "pixel_quality" in key['name']: - return self._get_dataset_quality(key['name']) - elif "index_map" in key['name']: - return self._get_dataset_index_map(key['name']) - elif _get_aux_data_name_from_dsname(key['name']) is not None: - return self._get_dataset_aux_data(key['name']) - elif any(lb in key['name'] for lb in {"vis_", "ir_", "nir_", "wv_"}): + 
logger.debug("Reading {} from {}".format(key["name"], self.filename)) + if "pixel_quality" in key["name"]: + return self._get_dataset_quality(key["name"]) + elif "index_map" in key["name"]: + return self._get_dataset_index_map(key["name"]) + elif _get_aux_data_name_from_dsname(key["name"]) is not None: + return self._get_dataset_aux_data(key["name"]) + elif any(lb in key["name"] for lb in {"vis_", "ir_", "nir_", "wv_"}): return self._get_dataset_measurand(key, info=info) else: raise ValueError("Unknown dataset key, not a channel, quality or auxiliary data: " @@ -321,7 +321,7 @@ def _get_dataset_measurand(self, key, info=None): """ # Get the dataset # Get metadata for given dataset - measured = self.get_channel_measured_group_path(key['name']) + measured = self.get_channel_measured_group_path(key["name"]) data = self[measured + "/effective_radiance"] attrs = dict(data.attrs).copy() @@ -332,7 +332,7 @@ def _get_dataset_measurand(self, key, info=None): "FillValue", default_fillvals.get(data.dtype.str[1:], np.nan)) vr = attrs.get("valid_range", [-np.inf, np.inf]) - if key['calibration'] == "counts": + if key["calibration"] == "counts": attrs["_FillValue"] = fv nfv = fv else: @@ -357,7 +357,7 @@ def _get_dataset_measurand(self, key, info=None): # https://github.com/pytroll/satpy/issues/1171. if "pixel_quality" in attrs["ancillary_variables"]: attrs["ancillary_variables"] = attrs["ancillary_variables"].replace( - "pixel_quality", key['name'] + "_pixel_quality") + "pixel_quality", key["name"] + "_pixel_quality") else: raise ValueError( "Unexpected value for attribute ancillary_variables, " @@ -373,20 +373,20 @@ def _get_dataset_measurand(self, key, info=None): self["attr/platform"], self["attr/platform"]) # remove unpacking parameters for calibrated data - if key['calibration'] in ['brightness_temperature', 'reflectance']: + if key["calibration"] in ["brightness_temperature", "reflectance"]: res.attrs.pop("add_offset") res.attrs.pop("warm_add_offset") res.attrs.pop("scale_factor") res.attrs.pop("warm_scale_factor") # remove attributes from original file which don't apply anymore - res.attrs.pop('long_name') + res.attrs.pop("long_name") # Add time_parameter attributes - res.attrs['time_parameters'] = { - 'nominal_start_time': self.nominal_start_time, - 'nominal_end_time': self.nominal_end_time, - 'observation_start_time': self.observation_start_time, - 'observation_end_time': self.observation_end_time, + res.attrs["time_parameters"] = { + "nominal_start_time": self.nominal_start_time, + "nominal_end_time": self.nominal_end_time, + "observation_start_time": self.observation_start_time, + "observation_end_time": self.observation_end_time, } res.attrs.update(self.orbital_param) @@ -395,9 +395,9 @@ def _get_dataset_measurand(self, key, info=None): @cached_property def orbital_param(self): """Compute the orbital parameters for the current segment.""" - actual_subsat_lon = float(np.nanmean(self._get_aux_data_lut_vector('subsatellite_longitude'))) - actual_subsat_lat = float(np.nanmean(self._get_aux_data_lut_vector('subsatellite_latitude'))) - actual_sat_alt = float(np.nanmean(self._get_aux_data_lut_vector('platform_altitude'))) + actual_subsat_lon = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_longitude"))) + actual_subsat_lat = float(np.nanmean(self._get_aux_data_lut_vector("subsatellite_latitude"))) + actual_sat_alt = float(np.nanmean(self._get_aux_data_lut_vector("platform_altitude"))) nominal_and_proj_subsat_lon = float( 
self.get_and_cache_npxr("data/mtg_geos_projection/attr/longitude_of_projection_origin")) nominal_and_proj_subsat_lat = 0 @@ -405,16 +405,16 @@ def orbital_param(self): self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) orb_param_dict = { - 'orbital_parameters': { - 'satellite_actual_longitude': actual_subsat_lon, - 'satellite_actual_latitude': actual_subsat_lat, - 'satellite_actual_altitude': actual_sat_alt, - 'satellite_nominal_longitude': nominal_and_proj_subsat_lon, - 'satellite_nominal_latitude': nominal_and_proj_subsat_lat, - 'satellite_nominal_altitude': nominal_and_proj_sat_alt, - 'projection_longitude': nominal_and_proj_subsat_lon, - 'projection_latitude': nominal_and_proj_subsat_lat, - 'projection_altitude': nominal_and_proj_sat_alt, + "orbital_parameters": { + "satellite_actual_longitude": actual_subsat_lon, + "satellite_actual_latitude": actual_subsat_lat, + "satellite_actual_altitude": actual_sat_alt, + "satellite_nominal_longitude": nominal_and_proj_subsat_lon, + "satellite_nominal_latitude": nominal_and_proj_subsat_lat, + "satellite_nominal_altitude": nominal_and_proj_sat_alt, + "projection_longitude": nominal_and_proj_subsat_lon, + "projection_latitude": nominal_and_proj_subsat_lat, + "projection_altitude": nominal_and_proj_sat_alt, }} return orb_param_dict @@ -432,7 +432,7 @@ def _get_dataset_index_map(self, dsname): dv_path = grp_path + "/index_map" data = self[dv_path] - data = data.where(data != data.attrs.get('_FillValue', 65535)) + data = data.where(data != data.attrs.get("_FillValue", 65535)) return data def _get_aux_data_lut_vector(self, aux_data_name): @@ -446,14 +446,14 @@ def _get_aux_data_lut_vector(self, aux_data_name): @staticmethod def _getitem(block, lut): - return lut[block.astype('uint16')] + return lut[block.astype("uint16")] def _get_dataset_aux_data(self, dsname): """Get the auxiliary data arrays using the index map.""" # get index map index_map = self._get_dataset_index_map(_get_channel_name_from_dsname(dsname)) # subtract minimum of index variable (index_offset) - index_map -= np.min(self.get_and_cache_npxr('index')) + index_map -= np.min(self.get_and_cache_npxr("index")) # get lut values from 1-d vector variable lut = self._get_aux_data_lut_vector(_get_aux_data_name_from_dsname(dsname)) @@ -472,14 +472,14 @@ def calc_area_extent(self, key): # if a user requests a pixel quality or index map before the channel data, the # yaml-reader will ask the area extent of the pixel quality/index map field, # which will ultimately end up here - channel_name = _get_channel_name_from_dsname(key['name']) + channel_name = _get_channel_name_from_dsname(key["name"]) # Get metadata for given dataset measured = self.get_channel_measured_group_path(channel_name) # Get start/end line and column of loaded swath. 
nlines, ncols = self[measured + "/effective_radiance/shape"] - logger.debug('Channel {} resolution: {}'.format(channel_name, ncols)) - logger.debug('Row/Cols: {} / {}'.format(nlines, ncols)) + logger.debug("Channel {} resolution: {}".format(channel_name, ncols)) + logger.debug("Row/Cols: {} / {}".format(nlines, ncols)) # Calculate full globe line extent h = float(self.get_and_cache_npxr("data/mtg_geos_projection/attr/perspective_point_height")) @@ -489,16 +489,16 @@ def calc_area_extent(self, key): coord_radian = self.get_and_cache_npxr(measured + "/{:s}".format(coord)) # TODO remove this check when old versions of IDPF test data ( 0: - coord_radian.attrs['scale_factor'] *= -1 + if coord == "x" and coord_radian.attrs["scale_factor"] > 0: + coord_radian.attrs["scale_factor"] *= -1 # TODO remove this check when old versions of IDPF test data ( 1.1: - logger.info('The variable state/celestial/earth_sun_distance contains unexpected values' - '(mean value is {} AU). Defaulting to 1 AU for reflectance calculation.' - ''.format(sun_earth_distance)) + logger.info("The variable state/celestial/earth_sun_distance contains unexpected values" + "(mean value is {} AU). Defaulting to 1 AU for reflectance calculation." + "".format(sun_earth_distance)) sun_earth_distance = 1 res = 100 * radiance * np.pi * sun_earth_distance ** 2 / cesi diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index cbb47b2c8c..c387326f89 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -41,18 +41,18 @@ class FciL2CommonFunctions(object): @property def spacecraft_name(self): """Return spacecraft name.""" - return self.nc.attrs['platform'] + return self.nc.attrs["platform"] @property def sensor_name(self): """Return instrument name.""" - return self.nc.attrs['data_source'] + return self.nc.attrs["data_source"] @property def ssp_lon(self): """Return longitude at subsatellite point.""" try: - return float(self.nc['mtg_geos_projection'].attrs['longitude_of_projection_origin']) + return float(self.nc["mtg_geos_projection"].attrs["longitude_of_projection_origin"]) except (KeyError, AttributeError): logger.warning(f"ssp_lon could not be obtained from file content, using default value " f"of {SSP_DEFAULT} degrees east instead") @@ -71,11 +71,11 @@ def _get_global_attributes(self): """ attributes = { - 'filename': self.filename, - 'spacecraft_name': self.spacecraft_name, - 'ssp_lon': self.ssp_lon, - 'sensor': self.sensor_name, - 'platform_name': self.spacecraft_name, + "filename": self.filename, + "spacecraft_name": self.spacecraft_name, + "ssp_lon": self.ssp_lon, + "sensor": self.sensor_name, + "platform_name": self.spacecraft_name, } return attributes @@ -86,10 +86,10 @@ def _set_attributes(self, variable, dataset_info, segmented=False): else: xdim, ydim = "number_of_columns", "number_of_rows" - if dataset_info['file_key'] not in ['product_quality', 'product_completeness', 'product_timeliness']: - variable = variable.rename({ydim: 'y', xdim: 'x'}) + if dataset_info["file_key"] not in ["product_quality", "product_completeness", "product_timeliness"]: + variable = variable.rename({ydim: "y", xdim: "x"}) - variable.attrs.setdefault('units', None) + variable.attrs.setdefault("units", None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) @@ -116,7 +116,7 @@ def _mask_data(variable, fill_value): fill_value = [fill_value] for val in fill_value: - variable = variable.where(variable != val).astype('float32') + variable = variable.where(variable != 
val).astype("float32") return variable @@ -139,8 +139,8 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition= decode_cf=True, mask_and_scale=True, chunks={ - 'number_of_columns': CHUNK_SIZE, - 'number_of_rows': CHUNK_SIZE + "number_of_columns": CHUNK_SIZE, + "number_of_rows": CHUNK_SIZE } ) @@ -148,10 +148,10 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition= logger.info("Setting `with_area_defintion=False` has no effect on pixel-based products.") # Read metadata which are common to all datasets - self.nlines = self.nc['y'].size - self.ncols = self.nc['x'].size - self._projection = self.nc['mtg_geos_projection'] - self.multi_dims = {'maximum_number_of_layers': 'layer', 'number_of_vis_channels': 'vis_channel_id'} + self.nlines = self.nc["y"].size + self.ncols = self.nc["x"].size + self._projection = self.nc["mtg_geos_projection"] + self.multi_dims = {"maximum_number_of_layers": "layer", "number_of_vis_channels": "vis_channel_id"} def get_area_def(self, key): """Return the area definition.""" @@ -162,9 +162,9 @@ def get_area_def(self, key): def get_dataset(self, dataset_id, dataset_info): """Get dataset using the file_key in dataset_info.""" - var_key = dataset_info['file_key'] - par_name = dataset_info['name'] - logger.debug('Reading in file to get dataset with key %s.', var_key) + var_key = dataset_info["file_key"] + par_name = dataset_info["name"] + logger.debug("Reading in file to get dataset with key %s.", var_key) try: variable = self.nc[var_key] @@ -173,20 +173,20 @@ def get_dataset(self, dataset_id, dataset_info): return None # Compute the area definition - if var_key not in ['product_quality', 'product_completeness', 'product_timeliness']: + if var_key not in ["product_quality", "product_completeness", "product_timeliness"]: self._area_def = self._compute_area_def(dataset_id) if any(dim_id in dataset_info.keys() for dim_id in self.multi_dims.values()): variable = self._slice_dataset(variable, dataset_info, self.multi_dims) - if par_name == 'retrieved_cloud_optical_thickness': + if par_name == "retrieved_cloud_optical_thickness": variable = self.get_total_cot(variable) - if dataset_info['file_type'] == 'nc_fci_test_clm': + if dataset_info["file_type"] == "nc_fci_test_clm": variable = self._decode_clm_test_data(variable, dataset_info) - if 'fill_value' in dataset_info: - variable = self._mask_data(variable, dataset_info['fill_value']) + if "fill_value" in dataset_info: + variable = self._mask_data(variable, dataset_info["fill_value"]) variable = self._set_attributes(variable, dataset_info) @@ -194,9 +194,9 @@ def get_dataset(self, dataset_id, dataset_info): @staticmethod def _decode_clm_test_data(variable, dataset_info): - if dataset_info['file_key'] != 'cloud_mask_cmrt6_test_result': - variable = variable.astype('uint32') - variable.values = (variable.values >> dataset_info['extract_byte'] << 31 >> 31).astype('int8') + if dataset_info["file_key"] != "cloud_mask_cmrt6_test_result": + variable = variable.astype("uint32") + variable.values = (variable.values >> dataset_info["extract_byte"] << 31 >> 31).astype("int8") return variable @@ -210,8 +210,8 @@ def _compute_area_def(self, dataset_id): area_extent = self._get_area_extent() area_naming, proj_dict = self._get_proj_area(dataset_id) area_def = geometry.AreaDefinition( - area_naming['area_id'], - area_naming['description'], + area_naming["area_id"], + area_naming["description"], "", proj_dict, self.ncols, @@ -223,15 +223,15 @@ def _compute_area_def(self, dataset_id): def 
_get_area_extent(self): """Calculate area extent of dataset.""" # Load and convert x/y coordinates to degrees as required by the make_ext function - x = self.nc['x'] - y = self.nc['y'] + x = self.nc["x"] + y = self.nc["y"] x_deg = np.degrees(x) y_deg = np.degrees(y) # Select the extreme points and calculate area extent (note: these refer to pixel centers) ll_x, ur_x = -x_deg.values[0], -x_deg.values[-1] ll_y, ur_y = y_deg.values[-1], y_deg.values[0] - h = float(self._projection.attrs['perspective_point_height']) + h = float(self._projection.attrs["perspective_point_height"]) area_extent_pixel_center = make_ext(ll_x, ur_x, ll_y, ur_y, h) # Shift area extent by half a pixel to get the area extent w.r.t. the dataset/pixel corners @@ -244,30 +244,30 @@ def _get_area_extent(self): """Calculate area extent of dataset.""" def _get_proj_area(self, dataset_id): """Extract projection and area information.""" # Read the projection data from the mtg_geos_projection variable - a = float(self._projection.attrs['semi_major_axis']) - h = float(self._projection.attrs['perspective_point_height']) + a = float(self._projection.attrs["semi_major_axis"]) + h = float(self._projection.attrs["perspective_point_height"]) # Some L2PF test data files have a typo in the keyname for the inverse flattening parameter. Use a default value # as fallback until all L2PF test files are correctly formatted. - rf = float(self._projection.attrs.get('inverse_flattening', 298.257223563)) + rf = float(self._projection.attrs.get("inverse_flattening", 298.257223563)) res = dataset_id["resolution"] - area_naming_input_dict = {'platform_name': 'mtg', - 'instrument_name': 'fci', - 'resolution': res, + area_naming_input_dict = {"platform_name": "mtg", + "instrument_name": "fci", + "resolution": res, } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('fci', self.ssp_lon)}) + **get_service_mode("fci", self.ssp_lon)}) - proj_dict = {'a': a, - 'lon_0': self.ssp_lon, - 'h': h, + proj_dict = {"a": a, + "lon_0": self.ssp_lon, + "h": h, "rf": rf, - 'proj': 'geos', - 'units': 'm', - "sweep": 'y'} + "proj": "geos", + "units": "m", + "sweep": "y"} return area_naming, proj_dict @@ -281,7 +281,7 @@ def get_total_cot(variable): attrs = variable.attrs variable = 10 ** variable variable = variable.fillna(0.)
- variable = variable.sum(dim='maximum_number_of_layers', keep_attrs=True) + variable = variable.sum(dim="maximum_number_of_layers", keep_attrs=True) variable = variable.where(variable != 0., np.nan) variable = np.log10(variable) variable.attrs = attrs @@ -301,19 +301,19 @@ def __init__(self, filename, filename_info, filetype_info, with_area_definition= decode_cf=True, mask_and_scale=True, chunks={ - 'number_of_FoR_cols': CHUNK_SIZE, - 'number_of_FoR_rows': CHUNK_SIZE + "number_of_FoR_cols": CHUNK_SIZE, + "number_of_FoR_rows": CHUNK_SIZE } ) # Read metadata which are common to all datasets - self.nlines = self.nc['number_of_FoR_rows'].size - self.ncols = self.nc['number_of_FoR_cols'].size + self.nlines = self.nc["number_of_FoR_rows"].size + self.ncols = self.nc["number_of_FoR_cols"].size self.with_adef = with_area_definition self.multi_dims = { - 'number_of_categories': 'category_id', 'number_of_channels': 'channel_id', - 'number_of_vis_channels': 'vis_channel_id', 'number_of_ir_channels': 'ir_channel_id', - 'number_test': 'test_id', + "number_of_categories": "category_id", "number_of_channels": "channel_id", + "number_of_vis_channels": "vis_channel_id", "number_of_ir_channels": "ir_channel_id", + "number_test": "test_id", } def get_area_def(self, key): @@ -325,8 +325,8 @@ def get_area_def(self, key): def get_dataset(self, dataset_id, dataset_info): """Get dataset using the file_key in dataset_info.""" - var_key = dataset_info['file_key'] - logger.debug('Reading in file to get dataset with key %s.', var_key) + var_key = dataset_info["file_key"] + logger.debug("Reading in file to get dataset with key %s.", var_key) try: variable = self.nc[var_key] @@ -337,16 +337,16 @@ def get_dataset(self, dataset_id, dataset_info): if any(dim_id in dataset_info.keys() for dim_id in self.multi_dims.values()): variable = self._slice_dataset(variable, dataset_info, self.multi_dims) - if self.with_adef and var_key not in ['longitude', 'latitude', - 'product_quality', 'product_completeness', 'product_timeliness']: + if self.with_adef and var_key not in ["longitude", "latitude", + "product_quality", "product_completeness", "product_timeliness"]: self._area_def = self._construct_area_def(dataset_id) # coordinates are not relevant when returning data with an AreaDefinition - if 'coordinates' in dataset_info.keys(): - del dataset_info['coordinates'] + if "coordinates" in dataset_info.keys(): + del dataset_info["coordinates"] - if 'fill_value' in dataset_info: - variable = self._mask_data(variable, dataset_info['fill_value']) + if "fill_value" in dataset_info: + variable = self._mask_data(variable, dataset_info["fill_value"]) variable = self._set_attributes(variable, dataset_info, segmented=True) @@ -361,19 +361,19 @@ def _construct_area_def(self, dataset_id): """ res = dataset_id["resolution"] - area_naming_input_dict = {'platform_name': 'mtg', - 'instrument_name': 'fci', - 'resolution': res, + area_naming_input_dict = {"platform_name": "mtg", + "instrument_name": "fci", + "resolution": res, } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('fci', self.ssp_lon)}) + **get_service_mode("fci", self.ssp_lon)}) # Construct area definition from standardized area definition. 
- stand_area_def = get_area_def(area_naming['area_id']) + stand_area_def = get_area_def(area_naming["area_id"]) if (stand_area_def.x_size != self.ncols) | (stand_area_def.y_size != self.nlines): - raise NotImplementedError('Unrecognised AreaDefinition.') + raise NotImplementedError("Unrecognised AreaDefinition.") mod_area_extent = self._modify_area_extent(stand_area_def.area_extent) diff --git a/satpy/readers/file_handlers.py b/satpy/readers/file_handlers.py index 0c47553b0d..3fdeed1edc 100644 --- a/satpy/readers/file_handlers.py +++ b/satpy/readers/file_handlers.py @@ -112,16 +112,16 @@ def combine_info(self, all_infos): """ combined_info = combine_metadata(*all_infos) - new_dict = self._combine(all_infos, min, 'start_time', 'start_orbit') - new_dict.update(self._combine(all_infos, max, 'end_time', 'end_orbit')) + new_dict = self._combine(all_infos, min, "start_time", "start_orbit") + new_dict.update(self._combine(all_infos, max, "end_time", "end_orbit")) new_dict.update(self._combine_orbital_parameters(all_infos)) new_dict.update(self._combine_time_parameters(all_infos)) try: - area = SwathDefinition(lons=np.ma.vstack([info['area'].lons for info in all_infos]), - lats=np.ma.vstack([info['area'].lats for info in all_infos])) - area.name = '_'.join([info['area'].name for info in all_infos]) - combined_info['area'] = area + area = SwathDefinition(lons=np.ma.vstack([info["area"].lons for info in all_infos]), + lats=np.ma.vstack([info["area"].lats for info in all_infos])) + area.name = "_".join([info["area"].name for info in all_infos]) + combined_info["area"] = area except KeyError: pass @@ -129,7 +129,7 @@ def combine_info(self, all_infos): return new_dict def _combine_orbital_parameters(self, all_infos): - orb_params = [info.get('orbital_parameters', {}) for info in all_infos] + orb_params = [info.get("orbital_parameters", {}) for info in all_infos] if not all(orb_params): return {} # Collect all available keys @@ -138,15 +138,15 @@ def _combine_orbital_parameters(self, all_infos): orb_params_comb.update(d) # Average known keys - keys = ['projection_longitude', 'projection_latitude', 'projection_altitude', - 'satellite_nominal_longitude', 'satellite_nominal_latitude', - 'satellite_actual_longitude', 'satellite_actual_latitude', 'satellite_actual_altitude', - 'nadir_longitude', 'nadir_latitude'] + keys = ["projection_longitude", "projection_latitude", "projection_altitude", + "satellite_nominal_longitude", "satellite_nominal_latitude", + "satellite_actual_longitude", "satellite_actual_latitude", "satellite_actual_altitude", + "nadir_longitude", "nadir_latitude"] orb_params_comb.update(self._combine(orb_params, np.mean, *keys)) - return {'orbital_parameters': orb_params_comb} + return {"orbital_parameters": orb_params_comb} def _combine_time_parameters(self, all_infos): - time_params = [info.get('time_parameters', {}) for info in all_infos] + time_params = [info.get("time_parameters", {}) for info in all_infos] if not all(time_params): return {} # Collect all available keys @@ -155,26 +155,26 @@ def _combine_time_parameters(self, all_infos): time_params_comb.update(d) start_keys = ( - 'nominal_start_time', - 'observation_start_time', + "nominal_start_time", + "observation_start_time", ) end_keys = ( - 'nominal_end_time', - 'observation_end_time', + "nominal_end_time", + "observation_end_time", ) time_params_comb.update(self._combine(time_params, min, *start_keys)) time_params_comb.update(self._combine(time_params, max, *end_keys)) - return {'time_parameters': time_params_comb} + return 
{"time_parameters": time_params_comb} @property def start_time(self): """Get start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def sensor_names(self): @@ -197,7 +197,7 @@ def file_type_matches(self, ds_ftype): """ if not isinstance(ds_ftype, (list, tuple)): ds_ftype = [ds_ftype] - if self.filetype_info['file_type'] in ds_ftype: + if self.filetype_info["file_type"] in ds_ftype: return True return None @@ -295,4 +295,4 @@ def available_datasets(self, configured_datasets=None): # file handler so let's yield early yield is_avail, ds_info continue - yield self.file_type_matches(ds_info['file_type']), ds_info + yield self.file_type_matches(ds_info["file_type"]), ds_info diff --git a/satpy/readers/fy4_base.py b/satpy/readers/fy4_base.py index 9b6b364420..144e559858 100644 --- a/satpy/readers/fy4_base.py +++ b/satpy/readers/fy4_base.py @@ -46,7 +46,7 @@ def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(FY4Base, self).__init__(filename, filename_info, filetype_info) - self.sensor = filename_info['instrument'] + self.sensor = filename_info["instrument"] # info of 250m, 500m, 1km, 2km and 4km data self._COFF_list = [21983.5, 10991.5, 5495.5, 2747.5, 1373.5] @@ -55,17 +55,17 @@ def __init__(self, filename, filename_info, filetype_info): self._CFAC_list = [163730199.0, 81865099.0, 40932549.0, 20466274.0, 10233137.0] self._LFAC_list = [163730199.0, 81865099.0, 40932549.0, 20466274.0, 10233137.0] - self.PLATFORM_NAMES = {'FY4A': 'FY-4A', - 'FY4B': 'FY-4B', - 'FY4C': 'FY-4C'} + self.PLATFORM_NAMES = {"FY4A": "FY-4A", + "FY4B": "FY-4B", + "FY4C": "FY-4C"} try: - self.PLATFORM_ID = self.PLATFORM_NAMES[filename_info['platform_id']] + self.PLATFORM_ID = self.PLATFORM_NAMES[filename_info["platform_id"]] except KeyError: raise KeyError(f"Unsupported platform ID: {filename_info['platform_id']}") - self.CHANS_ID = 'NOMChannel' - self.SAT_ID = 'NOMSatellite' - self.SUN_ID = 'NOMSun' + self.CHANS_ID = "NOMChannel" + self.SAT_ID = "NOMSatellite" + self.SUN_ID = "NOMSun" @staticmethod def scale(dn, slope, offset): @@ -112,10 +112,10 @@ def _getitem(block, lut): def reflectance_coeffs(self): """Retrieve the reflectance calibration coefficients from the HDF file.""" # using the corresponding SCALE and OFFSET - if self.PLATFORM_ID == 'FY-4A': - cal_coef = 'CALIBRATION_COEF(SCALE+OFFSET)' - elif self.PLATFORM_ID == 'FY-4B': - cal_coef = 'Calibration/CALIBRATION_COEF(SCALE+OFFSET)' + if self.PLATFORM_ID == "FY-4A": + cal_coef = "CALIBRATION_COEF(SCALE+OFFSET)" + elif self.PLATFORM_ID == "FY-4B": + cal_coef = "Calibration/CALIBRATION_COEF(SCALE+OFFSET)" else: raise KeyError(f"Unsupported platform ID for calibration: {self.PLATFORM_ID}") return self.get(cal_coef).values @@ -123,58 +123,58 @@ def reflectance_coeffs(self): def calibrate(self, data, ds_info, ds_name, file_key): """Calibrate the data.""" # Check if calibration is present, if not assume dataset is an angle - calibration = ds_info.get('calibration') + calibration = ds_info.get("calibration") # Return raw data in case of counts or no calibration - if calibration in ('counts', None): - data.attrs['units'] = ds_info['units'] - ds_info['valid_range'] = data.attrs['valid_range'] - ds_info['fill_value'] = data.attrs['FillValue'].item() - elif calibration == 'reflectance': + if calibration in ("counts", 
None): + data.attrs["units"] = ds_info["units"] + ds_info["valid_range"] = data.attrs["valid_range"] + ds_info["fill_value"] = data.attrs["FillValue"].item() + elif calibration == "reflectance": channel_index = int(file_key[-2:]) - 1 data = self.calibrate_to_reflectance(data, channel_index, ds_info) - elif calibration == 'brightness_temperature': + elif calibration == "brightness_temperature": data = self.calibrate_to_bt(data, ds_info, ds_name) - elif calibration == 'radiance': + elif calibration == "radiance": raise NotImplementedError("Calibration to radiance is not supported.") # Apply range limits, but not for counts or we convert to float! - if calibration != 'counts': - data = data.where((data >= min(data.attrs['valid_range'])) & - (data <= max(data.attrs['valid_range']))) + if calibration != "counts": + data = data.where((data >= min(data.attrs["valid_range"])) & + (data <= max(data.attrs["valid_range"]))) else: - data.attrs['_FillValue'] = data.attrs['FillValue'].item() + data.attrs["_FillValue"] = data.attrs["FillValue"].item() return data def calibrate_to_reflectance(self, data, channel_index, ds_info): """Calibrate to reflectance [%].""" logger.debug("Calibrating to reflectances") # using the corresponding SCALE and OFFSET - if self.sensor != 'AGRI' and self.sensor != 'GHI': - raise ValueError(f'Unsupported sensor type: {self.sensor}') + if self.sensor != "AGRI" and self.sensor != "GHI": + raise ValueError(f"Unsupported sensor type: {self.sensor}") coeffs = self.reflectance_coeffs num_channel = coeffs.shape[0] - if self.sensor == 'AGRI' and num_channel == 1: + if self.sensor == "AGRI" and num_channel == 1: # only channel_2, resolution = 500 m channel_index = 0 - data.data = da.where(data.data == data.attrs['FillValue'].item(), np.nan, data.data) - data.attrs['scale_factor'] = coeffs[channel_index, 0].item() - data.attrs['add_offset'] = coeffs[channel_index, 1].item() - data = self.scale(data, data.attrs['scale_factor'], data.attrs['add_offset']) + data.data = da.where(data.data == data.attrs["FillValue"].item(), np.nan, data.data) + data.attrs["scale_factor"] = coeffs[channel_index, 0].item() + data.attrs["add_offset"] = coeffs[channel_index, 1].item() + data = self.scale(data, data.attrs["scale_factor"], data.attrs["add_offset"]) data *= 100 - ds_info['valid_range'] = (data.attrs['valid_range'] * data.attrs['scale_factor'] + data.attrs['add_offset']) - ds_info['valid_range'] = ds_info['valid_range'] * 100 + ds_info["valid_range"] = (data.attrs["valid_range"] * data.attrs["scale_factor"] + data.attrs["add_offset"]) + ds_info["valid_range"] = ds_info["valid_range"] * 100 return data def calibrate_to_bt(self, data, ds_info, ds_name): """Calibrate to Brightness Temperatures [K].""" logger.debug("Calibrating to brightness_temperature") - if self.sensor not in ['GHI', 'AGRI']: + if self.sensor not in ["GHI", "AGRI"]: raise ValueError("Error, sensor must be GHI or AGRI.") # The key is sometimes prefixes with `Calibration/` so we try both options here - lut_key = ds_info.get('lut_key', ds_name) + lut_key = ds_info.get("lut_key", ds_name) try: lut = self[lut_key] except KeyError: @@ -183,66 +183,66 @@ def calibrate_to_bt(self, data, ds_info, ds_name): # the value of dn is the index of brightness_temperature data = self.apply_lut(data, lut) - ds_info['valid_range'] = lut.attrs['valid_range'] + ds_info["valid_range"] = lut.attrs["valid_range"] return data @property def start_time(self): """Get the start time.""" - start_time = self['/attr/Observing Beginning Date'] + 'T' + 
self['/attr/Observing Beginning Time'] + 'Z' + start_time = self["/attr/Observing Beginning Date"] + "T" + self["/attr/Observing Beginning Time"] + "Z" try: - return datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") except ValueError: # For some data there is no sub-second component - return datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%SZ') + return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """Get the end time.""" - end_time = self['/attr/Observing Ending Date'] + 'T' + self['/attr/Observing Ending Time'] + 'Z' + end_time = self["/attr/Observing Ending Date"] + "T" + self["/attr/Observing Ending Time"] + "Z" try: - return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") except ValueError: # For some data there is no sub-second component - return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%SZ') + return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%SZ") def get_area_def(self, key): """Get the area definition.""" # Coordination Group for Meteorological Satellites LRIT/HRIT Global Specification # https://www.cgms-info.org/documents/cgms-lrit-hrit-global-specification-(v2-8-of-30-oct-2013).pdf - res = key['resolution'] + res = key["resolution"] pdict = {} - begin_cols = float(self.file_content['/attr/Begin Pixel Number']) - end_lines = float(self.file_content['/attr/End Line Number']) - pdict['coff'] = self._COFF_list[RESOLUTION_LIST.index(res)] - begin_cols + 1 - pdict['loff'] = -self._LOFF_list[RESOLUTION_LIST.index(res)] + end_lines + 1 + begin_cols = float(self.file_content["/attr/Begin Pixel Number"]) + end_lines = float(self.file_content["/attr/End Line Number"]) + pdict["coff"] = self._COFF_list[RESOLUTION_LIST.index(res)] - begin_cols + 1 + pdict["loff"] = -self._LOFF_list[RESOLUTION_LIST.index(res)] + end_lines + 1 - pdict['cfac'] = self._CFAC_list[RESOLUTION_LIST.index(res)] - pdict['lfac'] = self._LFAC_list[RESOLUTION_LIST.index(res)] + pdict["cfac"] = self._CFAC_list[RESOLUTION_LIST.index(res)] + pdict["lfac"] = self._LFAC_list[RESOLUTION_LIST.index(res)] try: - pdict['a'] = float(self.file_content['/attr/Semimajor axis of ellipsoid']) + pdict["a"] = float(self.file_content["/attr/Semimajor axis of ellipsoid"]) except KeyError: - pdict['a'] = float(self.file_content['/attr/dEA']) - if pdict['a'] < 10000: - pdict['a'] = pdict['a'] * 1E3 # equator radius (m) + pdict["a"] = float(self.file_content["/attr/dEA"]) + if pdict["a"] < 10000: + pdict["a"] = pdict["a"] * 1E3 # equator radius (m) try: - pdict['b'] = float(self.file_content['/attr/Semiminor axis of ellipsoid']) + pdict["b"] = float(self.file_content["/attr/Semiminor axis of ellipsoid"]) except KeyError: - pdict['b'] = pdict['a'] * (1 - 1 / self.file_content['/attr/dObRecFlat']) # polar radius (m) + pdict["b"] = pdict["a"] * (1 - 1 / self.file_content["/attr/dObRecFlat"]) # polar radius (m) - pdict['h'] = self.file_content['/attr/NOMSatHeight'] # the altitude of satellite (m) - if pdict['h'] > 42000000.0: - pdict['h'] = pdict['h'] - pdict['a'] + pdict["h"] = self.file_content["/attr/NOMSatHeight"] # the altitude of satellite (m) + if pdict["h"] > 42000000.0: + pdict["h"] = pdict["h"] - pdict["a"] - pdict['ssp_lon'] = float(self.file_content['/attr/NOMCenterLon']) - pdict['nlines'] = float(self.file_content['/attr/RegLength']) - pdict['ncols'] = float(self.file_content['/attr/RegWidth']) + pdict["ssp_lon"] = float(self.file_content["/attr/NOMCenterLon"]) + 
pdict["nlines"] = float(self.file_content["/attr/RegLength"]) + pdict["ncols"] = float(self.file_content["/attr/RegWidth"]) - pdict['scandir'] = 'N2S' - pdict['a_desc'] = "FY-4 {} area".format(self.filename_info['observation_type']) - pdict['a_name'] = f'{self.filename_info["observation_type"]}_{res}m' - pdict['p_id'] = f'FY-4, {res}m' + pdict["scandir"] = "N2S" + pdict["a_desc"] = "FY-4 {} area".format(self.filename_info["observation_type"]) + pdict["a_name"] = f'{self.filename_info["observation_type"]}_{res}m' + pdict["p_id"] = f"FY-4, {res}m" area_extent = get_area_extent(pdict) area_extent = (area_extent[0], diff --git a/satpy/readers/generic_image.py b/satpy/readers/generic_image.py index 3fb8c6b560..1ba160095f 100644 --- a/satpy/readers/generic_image.py +++ b/satpy/readers/generic_image.py @@ -40,13 +40,13 @@ CHUNK_SIZE = get_legacy_chunk_size() -BANDS = {1: ['L'], - 2: ['L', 'A'], - 3: ['R', 'G', 'B'], - 4: ['R', 'G', 'B', 'A']} +BANDS = {1: ["L"], + 2: ["L", "A"], + 3: ["R", "G", "B"], + 4: ["R", "G", "B", "A"]} -NODATA_HANDLING_FILLVALUE = 'fill_value' -NODATA_HANDLING_NANMASK = 'nan_mask' +NODATA_HANDLING_FILLVALUE = "fill_value" +NODATA_HANDLING_NANMASK = "nan_mask" logger = logging.getLogger(__name__) @@ -60,10 +60,10 @@ def __init__(self, filename, filename_info, filetype_info): filename, filename_info, filetype_info) self.finfo = filename_info try: - self.finfo['end_time'] = self.finfo['start_time'] + self.finfo["end_time"] = self.finfo["start_time"] except KeyError: pass - self.finfo['filename'] = self.filename + self.finfo["filename"] = self.filename self.file_content = {} self.area = None self.dataset_name = None @@ -71,10 +71,10 @@ def __init__(self, filename, filename_info, filetype_info): def read(self): """Read the image.""" - dataset = rasterio.open(self.finfo['filename']) + dataset = rasterio.open(self.finfo["filename"]) # Create area definition - if hasattr(dataset, 'crs') and dataset.crs is not None: + if hasattr(dataset, "crs") and dataset.crs is not None: self.area = utils.get_area_def_from_raster(dataset) data = xr.open_dataset(self.finfo["filename"], engine="rasterio", @@ -90,13 +90,13 @@ def read(self): attrs = data.attrs.copy() # Rename to Satpy convention - data = data.rename({'band': 'bands'}) + data = data.rename({"band": "bands"}) # Rename bands to [R, G, B, A], or a subset of those - data['bands'] = BANDS[data.bands.size] + data["bands"] = BANDS[data.bands.size] data.attrs = attrs - self.dataset_name = 'image' + self.dataset_name = "image" self.file_content[self.dataset_name] = data def get_area_def(self, dsid): @@ -108,16 +108,16 @@ def get_area_def(self, dsid): @property def start_time(self): """Return start time.""" - return self.finfo['start_time'] + return self.finfo["start_time"] @property def end_time(self): """Return end time.""" - return self.finfo['end_time'] + return self.finfo["end_time"] def get_dataset(self, key, info): """Get a dataset from the file.""" - ds_name = self.dataset_name if self.dataset_name else key['name'] + ds_name = self.dataset_name if self.dataset_name else key["name"] logger.debug("Reading '%s.'", ds_name) data = self.file_content[ds_name] @@ -149,8 +149,8 @@ def _mask_image_data(data, info): for i in range(data.shape[0])]) data.data = masked_data data = data.sel(bands=BANDS[data.bands.size - 1]) - elif hasattr(data, 'nodatavals') and data.nodatavals: - data = _handle_nodatavals(data, info.get('nodata_handling', NODATA_HANDLING_FILLVALUE)) + elif hasattr(data, "nodatavals") and data.nodatavals: + data = 
_handle_nodatavals(data, info.get("nodata_handling", NODATA_HANDLING_FILLVALUE)) return data @@ -162,7 +162,7 @@ def _handle_nodatavals(data, nodata_handling): masked_data = da.stack([da.where(data.data[i, :, :] == nodataval, np.nan, data.data[i, :, :]) for i, nodataval in enumerate(data.nodatavals)]) data.data = masked_data - data.attrs['_FillValue'] = np.nan + data.attrs["_FillValue"] = np.nan elif nodata_handling == NODATA_HANDLING_FILLVALUE: # keep data as it is but set _FillValue attribute to provided # nodatavalue (first one as it has to be the same for all bands at least @@ -170,5 +170,5 @@ def _handle_nodatavals(data, nodata_handling): fill_value = data.nodatavals[0] if np.issubdtype(data.dtype, np.integer): fill_value = int(fill_value) - data.attrs['_FillValue'] = fill_value + data.attrs["_FillValue"] = fill_value return data diff --git a/satpy/readers/geocat.py b/satpy/readers/geocat.py index 5086cd899b..185e7d3c13 100644 --- a/satpy/readers/geocat.py +++ b/satpy/readers/geocat.py @@ -44,14 +44,14 @@ CF_UNITS = { - 'none': '1', + "none": "1", } # GEOCAT currently doesn't include projection information in it's files GEO_PROJS = { - 'GOES-16': '+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs', - 'GOES-17': '+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs', - 'HIMAWARI-8': '+proj=geos +over +lon_0=140.7 +h=35785863 +a=6378137 +b=6356752.299581327 +units=m +no_defs', + "GOES-16": "+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs", + "GOES-17": "+proj=geos +lon_0={lon_0:0.02f} +h=35786023.0 +a=6378137.0 +b=6356752.31414 +sweep=x +units=m +no_defs", + "HIMAWARI-8": "+proj=geos +over +lon_0=140.7 +h=35785863 +a=6378137 +b=6356752.299581327 +units=m +no_defs", } @@ -72,29 +72,29 @@ class GEOCATFileHandler(NetCDF4FileHandler): def __init__(self, filename, filename_info, filetype_info, **kwargs): """Open and perform initial investigation of NetCDF file.""" - kwargs.setdefault('xarray_kwargs', {}).setdefault( - 'engine', "netcdf4") - kwargs.setdefault('xarray_kwargs', {}).setdefault( - 'decode_times', False) + kwargs.setdefault("xarray_kwargs", {}).setdefault( + "engine", "netcdf4") + kwargs.setdefault("xarray_kwargs", {}).setdefault( + "decode_times", False) super(GEOCATFileHandler, self).__init__( filename, filename_info, filetype_info, xarray_kwargs=kwargs["xarray_kwargs"]) sensors = { - 'goes': 'goes_imager', - 'himawari8': 'ahi', - 'goes16': 'abi', # untested - 'goesr': 'abi', # untested + "goes": "goes_imager", + "himawari8": "ahi", + "goes16": "abi", # untested + "goesr": "abi", # untested } platforms: dict[str, str] = { } resolutions = { - 'abi': { + "abi": { 1: 1002.0086577437705, 2: 2004.0173154875411, }, - 'ahi': { + "ahi": { 1: 999.9999820317674, # assumption 2: 1999.999964063535, 4: 3999.99992812707, @@ -121,7 +121,7 @@ def get_platform(self, platform): return platform def _get_proj(self, platform, ref_lon): - if platform == 'GOES-16' and -76. < ref_lon < -74.: + if platform == "GOES-16" and -76. < ref_lon < -74.: # geocat file holds the *actual* subsatellite point, not the # projection (-75.2 actual versus -75 projection) ref_lon = -75. 
@@ -130,33 +130,33 @@ def _get_proj(self, platform, ref_lon): @property def sensor_names(self): """Get sensor names.""" - return [self.get_sensor(self['/attr/Sensor_Name'])] + return [self.get_sensor(self["/attr/Sensor_Name"])] @property def start_time(self): """Get start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def is_geo(self): """Check platform.""" - platform = self.get_platform(self['/attr/Platform_Name']) + platform = self.get_platform(self["/attr/Platform_Name"]) return platform in GEO_PROJS @property def resolution(self): """Get resolution.""" - elem_res = self['/attr/Element_Resolution'] + elem_res = self["/attr/Element_Resolution"] return int(elem_res * 1000) def _calc_area_resolution(self, ds_res): elem_res = round(ds_res / 1000.) # mimic 'Element_Resolution' attribute from above - sensor = self.get_sensor(self['/attr/Sensor_Name']) + sensor = self.get_sensor(self["/attr/Sensor_Name"]) return self.resolutions.get(sensor, {}).get(int(elem_res), elem_res * 1000.) @@ -174,27 +174,27 @@ def available_datasets(self, configured_datasets=None): """ res = self.resolution - coordinates = ('pixel_longitude', 'pixel_latitude') + coordinates = ("pixel_longitude", "pixel_latitude") handled_variables = set() # update previously configured datasets for is_avail, ds_info in (configured_datasets or []): - this_res = ds_info.get('resolution') - this_coords = ds_info.get('coordinates') + this_res = ds_info.get("resolution") + this_coords = ds_info.get("coordinates") # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info - var_name = ds_info.get('file_key', ds_info['name']) - matches = self.file_type_matches(ds_info['file_type']) + var_name = ds_info.get("file_key", ds_info["name"]) + matches = self.file_type_matches(ds_info["file_type"]) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self and this_res != res: handled_variables.add(var_name) new_info = ds_info.copy() # don't mess up the above yielded - new_info['resolution'] = res + new_info["resolution"] = res if not self.is_geo and this_coords is None: - new_info['coordinates'] = coordinates + new_info["coordinates"] = coordinates yield True, new_info elif is_avail is None: # if we didn't know how to handle this dataset and no one else did @@ -207,21 +207,21 @@ def available_datasets(self, configured_datasets=None): continue if isinstance(val, netCDF4.Variable): ds_info = { - 'file_type': self.filetype_info['file_type'], - 'resolution': res, - 'name': var_name, + "file_type": self.filetype_info["file_type"], + "resolution": res, + "name": var_name, } if not self.is_geo: - ds_info['coordinates'] = coordinates + ds_info["coordinates"] = coordinates yield True, ds_info def get_shape(self, dataset_id, ds_info): """Get shape.""" - var_name = ds_info.get('file_key', dataset_id['name']) - return self[var_name + '/shape'] + var_name = ds_info.get("file_key", dataset_id["name"]) + return self[var_name + "/shape"] def _first_good_nav(self, lon_arr, lat_arr): - if hasattr(lon_arr, 'mask'): + if hasattr(lon_arr, "mask"): good_indexes = np.nonzero(~lon_arr.mask) else: # no masked values found in auto maskandscale @@ -247,9 +247,9 @@ def _get_extents(self, proj, res, lon_arr, lat_arr): def _load_nav(self, name): nav = 
self[name] - factor = self[name + '/attr/scale_factor'] - offset = self[name + '/attr/add_offset'] - fill = self[name + '/attr/_FillValue'] + factor = self[name + "/attr/scale_factor"] + offset = self[name + "/attr/add_offset"] + fill = self[name + "/attr/_FillValue"] nav = nav[:] mask = nav == fill nav = np.ma.masked_array(nav * factor + offset, mask=mask) @@ -260,15 +260,15 @@ def get_area_def(self, dsid): if not self.is_geo: raise NotImplementedError("Don't know how to get the Area Definition for this file") - platform = self.get_platform(self['/attr/Platform_Name']) - res = self._calc_area_resolution(dsid['resolution']) - proj = self._get_proj(platform, float(self['/attr/Subsatellite_Longitude'])) - area_name = '{} {} Area at {}m'.format( + platform = self.get_platform(self["/attr/Platform_Name"]) + res = self._calc_area_resolution(dsid["resolution"]) + proj = self._get_proj(platform, float(self["/attr/Subsatellite_Longitude"])) + area_name = "{} {} Area at {}m".format( platform, - self.metadata.get('sector_id', ''), + self.metadata.get("sector_id", ""), int(res)) - lon = self._load_nav('pixel_longitude') - lat = self._load_nav('pixel_latitude') + lon = self._load_nav("pixel_longitude") + lat = self._load_nav("pixel_latitude") extents = self._get_extents(proj, res, lon, lat) area_def = geometry.AreaDefinition( area_name, @@ -283,36 +283,36 @@ def get_area_def(self, dsid): def get_metadata(self, dataset_id, ds_info): """Get metadata.""" - var_name = ds_info.get('file_key', dataset_id['name']) + var_name = ds_info.get("file_key", dataset_id["name"]) shape = self.get_shape(dataset_id, ds_info) - info = getattr(self[var_name], 'attrs', {}) - info['shape'] = shape + info = getattr(self[var_name], "attrs", {}) + info["shape"] = shape info.update(ds_info) - u = info.get('units') + u = info.get("units") if u in CF_UNITS: # CF compliance - info['units'] = CF_UNITS[u] + info["units"] = CF_UNITS[u] - info['sensor'] = self.get_sensor(self['/attr/Sensor_Name']) - info['platform_name'] = self.get_platform(self['/attr/Platform_Name']) - info['resolution'] = dataset_id['resolution'] - if var_name == 'pixel_longitude': - info['standard_name'] = 'longitude' - elif var_name == 'pixel_latitude': - info['standard_name'] = 'latitude' + info["sensor"] = self.get_sensor(self["/attr/Sensor_Name"]) + info["platform_name"] = self.get_platform(self["/attr/Platform_Name"]) + info["resolution"] = dataset_id["resolution"] + if var_name == "pixel_longitude": + info["standard_name"] = "longitude" + elif var_name == "pixel_latitude": + info["standard_name"] = "latitude" return info def get_dataset(self, dataset_id, ds_info): """Get dataset.""" - var_name = ds_info.get('file_key', dataset_id['name']) + var_name = ds_info.get("file_key", dataset_id["name"]) # FUTURE: Metadata retrieval may be separate info = self.get_metadata(dataset_id, ds_info) data = self[var_name] - fill = self[var_name + '/attr/_FillValue'] - factor = self.get(var_name + '/attr/scale_factor') - offset = self.get(var_name + '/attr/add_offset') - valid_range = self.get(var_name + '/attr/valid_range') + fill = self[var_name + "/attr/_FillValue"] + factor = self.get(var_name + "/attr/scale_factor") + offset = self.get(var_name + "/attr/add_offset") + valid_range = self.get(var_name + "/attr/valid_range") data = data.where(data != fill) if valid_range is not None: @@ -321,5 +321,5 @@ def get_dataset(self, dataset_id, ds_info): data = data * factor + offset data.attrs.update(info) - data = data.rename({'lines': 'y', 'elements': 'x'}) + data = 
data.rename({"lines": "y", "elements": "x"}) return data diff --git a/satpy/readers/gerb_l2_hr_h5.py b/satpy/readers/gerb_l2_hr_h5.py index f663b3040f..0bf918d68f 100644 --- a/satpy/readers/gerb_l2_hr_h5.py +++ b/satpy/readers/gerb_l2_hr_h5.py @@ -40,10 +40,10 @@ def gerb_get_dataset(ds, ds_info): The routine takes into account the quantisation factor and fill values. """ ds_attrs = ds.attrs - ds_fill = ds_info['fill_value'] + ds_fill = ds_info["fill_value"] fill_mask = ds != ds_fill - if 'Quantisation Factor' in ds_attrs and 'Unit' in ds_attrs: - ds = ds*ds_attrs['Quantisation Factor'] + if "Quantisation Factor" in ds_attrs and "Unit" in ds_attrs: + ds = ds*ds_attrs["Quantisation Factor"] else: ds = ds*1. ds = ds.where(fill_mask) @@ -61,17 +61,17 @@ def end_time(self): @property def start_time(self): """Get start time.""" - return self.filename_info['sensing_time'] + return self.filename_info["sensing_time"] def get_dataset(self, ds_id, ds_info): """Read a HDF5 file into an xarray DataArray.""" - ds_name = ds_id['name'] - if ds_name not in ['Solar Flux', 'Thermal Flux', 'Solar Radiance', 'Thermal Radiance']: + ds_name = ds_id["name"] + if ds_name not in ["Solar Flux", "Thermal Flux", "Solar Radiance", "Thermal Radiance"]: raise KeyError(f"{ds_name} is an unknown dataset for this reader.") - ds = gerb_get_dataset(self[f'Radiometry/{ds_name}'], ds_info) + ds = gerb_get_dataset(self[f"Radiometry/{ds_name}"], ds_info) - ds.attrs.update({'start_time': self.start_time, 'data_time': self.start_time, 'end_time': self.end_time}) + ds.attrs.update({"start_time": self.start_time, "data_time": self.start_time, "end_time": self.end_time}) return ds diff --git a/satpy/readers/ghi_l1.py b/satpy/readers/ghi_l1.py index 2e26aeee24..3c085282c7 100644 --- a/satpy/readers/ghi_l1.py +++ b/satpy/readers/ghi_l1.py @@ -38,20 +38,20 @@ class HDF_GHI_L1(FY4Base): def __init__(self, filename, filename_info, filetype_info): """Init filehandler.""" super(HDF_GHI_L1, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'GHI' + self.sensor = "GHI" def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" - ds_name = dataset_id['name'] - logger.debug('Reading in get_dataset %s.', ds_name) - file_key = ds_info.get('file_key', ds_name) + ds_name = dataset_id["name"] + logger.debug("Reading in get_dataset %s.", ds_name) + file_key = ds_info.get("file_key", ds_name) if self.CHANS_ID in file_key: - file_key = f'Data/{file_key}' + file_key = f"Data/{file_key}" elif self.SUN_ID in file_key or self.SAT_ID in file_key: - file_key = f'Navigation/{file_key}' + file_key = f"Navigation/{file_key}" data = self.get(file_key) if data.ndim >= 2: - data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) + data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) data = self.calibrate(data, ds_info, ds_name, file_key) @@ -61,58 +61,58 @@ def get_dataset(self, dataset_id, ds_info): def adjust_attrs(self, data, ds_info): """Adjust the attrs of the data.""" - satname = self.PLATFORM_NAMES.get(self['/attr/Satellite Name'], self['/attr/Satellite Name']) - data.attrs.update({'platform_name': satname, - 'sensor': self['/attr/Sensor Identification Code'].lower(), - 'orbital_parameters': { - 'satellite_nominal_latitude': self['/attr/NOMSubSatLat'].item(), - 'satellite_nominal_longitude': self['/attr/NOMSubSatLon'].item(), - 'satellite_nominal_altitude': self['/attr/NOMSatHeight'].item()}}) + satname = self.PLATFORM_NAMES.get(self["/attr/Satellite Name"], self["/attr/Satellite Name"]) + 
data.attrs.update({"platform_name": satname, + "sensor": self["/attr/Sensor Identification Code"].lower(), + "orbital_parameters": { + "satellite_nominal_latitude": self["/attr/NOMSubSatLat"].item(), + "satellite_nominal_longitude": self["/attr/NOMSubSatLon"].item(), + "satellite_nominal_altitude": self["/attr/NOMSatHeight"].item()}}) data.attrs.update(ds_info) # remove attributes that could be confusing later - data.attrs.pop('FillValue', None) - data.attrs.pop('Intercept', None) - data.attrs.pop('Slope', None) + data.attrs.pop("FillValue", None) + data.attrs.pop("Intercept", None) + data.attrs.pop("Slope", None) def get_area_def(self, key): """Get the area definition.""" # Coordination Group for Meteorological Satellites LRIT/HRIT Global Specification # https://www.cgms-info.org/documents/cgms-lrit-hrit-global-specification-(v2-8-of-30-oct-2013).pdf - res = key['resolution'] + res = key["resolution"] pdict = {} - c_lats = self.file_content['/attr/Corner-Point Latitudes'] - c_lons = self.file_content['/attr/Corner-Point Longitudes'] + c_lats = self.file_content["/attr/Corner-Point Latitudes"] + c_lons = self.file_content["/attr/Corner-Point Longitudes"] p1 = (c_lons[0], c_lats[0]) p2 = (c_lons[1], c_lats[1]) p3 = (c_lons[2], c_lats[2]) p4 = (c_lons[3], c_lats[3]) - pdict['a'] = self.file_content['/attr/Semi_major_axis'] * 1E3 # equator radius (m) - pdict['b'] = self.file_content['/attr/Semi_minor_axis'] * 1E3 # equator radius (m) - pdict['h'] = self.file_content['/attr/NOMSatHeight'] * 1E3 # the altitude of satellite (m) + pdict["a"] = self.file_content["/attr/Semi_major_axis"] * 1E3 # equator radius (m) + pdict["b"] = self.file_content["/attr/Semi_minor_axis"] * 1E3 # equator radius (m) + pdict["h"] = self.file_content["/attr/NOMSatHeight"] * 1E3 # the altitude of satellite (m) - pdict['h'] = pdict['h'] - pdict['a'] + pdict["h"] = pdict["h"] - pdict["a"] - pdict['ssp_lon'] = float(self.file_content['/attr/NOMSubSatLon']) - pdict['nlines'] = float(self.file_content['/attr/RegLength']) - pdict['ncols'] = float(self.file_content['/attr/RegWidth']) + pdict["ssp_lon"] = float(self.file_content["/attr/NOMSubSatLon"]) + pdict["nlines"] = float(self.file_content["/attr/RegLength"]) + pdict["ncols"] = float(self.file_content["/attr/RegWidth"]) - pdict['scandir'] = 'S2N' + pdict["scandir"] = "S2N" - pdict['a_desc'] = "FY-4 {} area".format(self.filename_info['observation_type']) - pdict['a_name'] = f'{self.filename_info["observation_type"]}_{res}m' - pdict['p_id'] = f'FY-4, {res}m' + pdict["a_desc"] = "FY-4 {} area".format(self.filename_info["observation_type"]) + pdict["a_name"] = f'{self.filename_info["observation_type"]}_{res}m' + pdict["p_id"] = f"FY-4, {res}m" - proj_dict = {'a': pdict['a'], - 'b': pdict['b'], - 'lon_0': pdict['ssp_lon'], - 'h': pdict['h'], - 'proj': 'geos', - 'units': 'm', - 'sweep': 'y'} + proj_dict = {"a": pdict["a"], + "b": pdict["b"], + "lon_0": pdict["ssp_lon"], + "h": pdict["h"], + "proj": "geos", + "units": "m", + "sweep": "y"} p = Proj(proj_dict) o1 = (p(p1[0], p1[1])) # Upper left diff --git a/satpy/readers/ghrsst_l2.py b/satpy/readers/ghrsst_l2.py index 384bafa289..6c4005623e 100644 --- a/satpy/readers/ghrsst_l2.py +++ b/satpy/readers/ghrsst_l2.py @@ -39,15 +39,15 @@ def __init__(self, filename, filename_info, filetype_info, engine=None): self._engine = engine self._tarfile = None - self.filename_info['start_time'] = datetime.strptime( - self.nc.start_time, '%Y%m%dT%H%M%SZ') - self.filename_info['end_time'] = datetime.strptime( - self.nc.stop_time, '%Y%m%dT%H%M%SZ') 
+ self.filename_info["start_time"] = datetime.strptime( + self.nc.start_time, "%Y%m%dT%H%M%SZ") + self.filename_info["end_time"] = datetime.strptime( + self.nc.stop_time, "%Y%m%dT%H%M%SZ") @cached_property def nc(self): """Get the xarray Dataset for the filename.""" - if os.fspath(self.filename).endswith('tar'): + if os.fspath(self.filename).endswith("tar"): file_obj = self._open_tarfile() else: file_obj = self.filename @@ -56,13 +56,13 @@ def nc(self): decode_cf=True, mask_and_scale=True, engine=self._engine, - chunks={'ni': CHUNK_SIZE, - 'nj': CHUNK_SIZE}) + chunks={"ni": CHUNK_SIZE, + "nj": CHUNK_SIZE}) - return nc.rename({'ni': 'x', 'nj': 'y'}) + return nc.rename({"ni": "x", "nj": "y"}) def _open_tarfile(self): - self._tarfile = tarfile.open(name=self.filename, mode='r') + self._tarfile = tarfile.open(name=self.filename, mode="r") sst_filename = next((name for name in self._tarfile.getnames() if self._is_sst_file(name))) file_obj = self._tarfile.extractfile(sst_filename) @@ -71,27 +71,27 @@ def _open_tarfile(self): @staticmethod def _is_sst_file(name): """Check if file in the tar archive is a valid SST file.""" - return name.endswith('nc') and 'GHRSST-SSTskin' in name + return name.endswith("nc") and "GHRSST-SSTskin" in name def get_dataset(self, key, info): """Get any available dataset.""" - stdname = info.get('standard_name') + stdname = info.get("standard_name") return self.nc[stdname].squeeze() @property def start_time(self): """Get start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" - return self.filename_info['end_time'] + return self.filename_info["end_time"] @property def sensor(self): """Get the sensor name.""" - return self.nc.attrs['sensor'].lower() + return self.nc.attrs["sensor"].lower() def __del__(self): """Close the tarfile object.""" diff --git a/satpy/readers/ghrsst_l3c_sst.py b/satpy/readers/ghrsst_l3c_sst.py index d35621d341..ef1dd220a9 100644 --- a/satpy/readers/ghrsst_l3c_sst.py +++ b/satpy/readers/ghrsst_l3c_sst.py @@ -28,16 +28,16 @@ logger = logging.getLogger(__name__) -PLATFORM_NAME = {'NPP': 'Suomi-NPP', } -SENSOR_NAME = {'VIIRS': 'viirs', - 'AVHRR': 'avhrr/3'} +PLATFORM_NAME = {"NPP": "Suomi-NPP", } +SENSOR_NAME = {"VIIRS": "viirs", + "AVHRR": "avhrr/3"} class GHRSST_OSISAFL2(NetCDF4FileHandler): """Reader for the OSISAF SST GHRSST format.""" def _parse_datetime(self, datestr): - return datetime.strptime(datestr, '%Y%m%dT%H%M%SZ') + return datetime.strptime(datestr, "%Y%m%dT%H%M%SZ") def get_area_def(self, area_id, area_info): """Override abstract baseclass method.""" @@ -45,21 +45,21 @@ def get_area_def(self, area_id, area_info): def get_dataset(self, dataset_id, ds_info, out=None): """Load a dataset.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) - dtype = ds_info.get('dtype', np.float32) - if var_path + '/shape' not in self: + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) + dtype = ds_info.get("dtype", np.float32) + if var_path + "/shape" not in self: # loading a scalar value shape = 1 else: - shape = self[var_path + '/shape'] + shape = self[var_path + "/shape"] if shape[0] == 1: # Remove the time dimenstion from dataset shape = shape[1], shape[2] - file_units = ds_info.get('file_units') + file_units = ds_info.get("file_units") if file_units is None: try: - file_units = self[var_path + '/attr/units'] + file_units = self[var_path + "/attr/units"] # they were almost completely CF compliant... 
if file_units == "none": file_units = "1" @@ -76,19 +76,19 @@ def get_dataset(self, dataset_id, ds_info, out=None): ds_info.update({ "units": ds_info.get("units", file_units), - "platform_name": PLATFORM_NAME.get(self['/attr/platform'], self['/attr/platform']), - "sensor": SENSOR_NAME.get(self['/attr/sensor'], self['/attr/sensor']), + "platform_name": PLATFORM_NAME.get(self["/attr/platform"], self["/attr/platform"]), + "sensor": SENSOR_NAME.get(self["/attr/sensor"], self["/attr/sensor"]), }) ds_info.update(dataset_id.to_dict()) cls = ds_info.pop("container", Dataset) return cls(out, **ds_info) def _scale_and_mask_data(self, out, var_path): - valid_min = self[var_path + '/attr/valid_min'] - valid_max = self[var_path + '/attr/valid_max'] + valid_min = self[var_path + "/attr/valid_min"] + valid_max = self[var_path + "/attr/valid_max"] try: - scale_factor = self[var_path + '/attr/scale_factor'] - scale_offset = self[var_path + '/attr/add_offset'] + scale_factor = self[var_path + "/attr/scale_factor"] + scale_offset = self[var_path + "/attr/add_offset"] except KeyError: scale_factor = scale_offset = None if valid_min is not None and valid_max is not None: @@ -100,16 +100,16 @@ def _scale_and_mask_data(self, out, var_path): def get_lonlats(self, navid, nav_info, lon_out=None, lat_out=None): """Load an area.""" - lon_key = 'lon' - valid_min = self[lon_key + '/attr/valid_min'] - valid_max = self[lon_key + '/attr/valid_max'] + lon_key = "lon" + valid_min = self[lon_key + "/attr/valid_min"] + valid_max = self[lon_key + "/attr/valid_max"] lon_out.data[:] = self[lon_key][::-1] lon_out.mask[:] = (lon_out < valid_min) | (lon_out > valid_max) - lat_key = 'lat' - valid_min = self[lat_key + '/attr/valid_min'] - valid_max = self[lat_key + '/attr/valid_max'] + lat_key = "lat" + valid_min = self[lat_key + "/attr/valid_min"] + valid_max = self[lat_key + "/attr/valid_max"] lat_out.data[:] = self[lat_key][::-1] lat_out.mask[:] = (lat_out < valid_min) | (lat_out > valid_max) @@ -119,9 +119,9 @@ def get_lonlats(self, navid, nav_info, lon_out=None, lat_out=None): def start_time(self): """Get start time.""" # return self.filename_info['start_time'] - return self._parse_datetime(self['/attr/start_time']) + return self._parse_datetime(self["/attr/start_time"]) @property def end_time(self): """Get end time.""" - return self._parse_datetime(self['/attr/stop_time']) + return self._parse_datetime(self["/attr/stop_time"]) diff --git a/satpy/readers/glm_l2.py b/satpy/readers/glm_l2.py index bfb2719b07..ceb11a33bc 100644 --- a/satpy/readers/glm_l2.py +++ b/satpy/readers/glm_l2.py @@ -33,9 +33,9 @@ logger = logging.getLogger(__name__) PLATFORM_NAMES = { - 'G16': 'GOES-16', - 'G17': 'GOES-17', - 'G18': 'GOES-18', + "G16": "GOES-16", + "G17": "GOES-17", + "G18": "GOES-18", } # class NC_GLM_L2_LCFA(BaseFileHandler): — add this with glmtools @@ -47,45 +47,45 @@ class NCGriddedGLML2(NC_ABI_BASE): @property def sensor(self): """Get sensor name for current file handler.""" - return 'glm' + return "glm" @property def start_time(self): """Start time of the current file's observations.""" - return datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y-%m-%dT%H:%M:%SZ') + return datetime.strptime(self.nc.attrs["time_coverage_start"], "%Y-%m-%dT%H:%M:%SZ") @property def end_time(self): """End time of the current file's observations.""" - return datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y-%m-%dT%H:%M:%SZ') + return datetime.strptime(self.nc.attrs["time_coverage_end"], "%Y-%m-%dT%H:%M:%SZ") def _is_category_product(self, 
data_arr): # if after autoscaling we still have an integer is_int = np.issubdtype(data_arr.dtype, np.integer) # and it has a fill value - has_fill = '_FillValue' in data_arr.attrs + has_fill = "_FillValue" in data_arr.attrs # or it has flag_meanings - has_meanings = 'flag_meanings' in data_arr.attrs + has_meanings = "flag_meanings" in data_arr.attrs # then it is likely a category product and we should keep the # _FillValue for satpy to use later return is_int and (has_fill or has_meanings) def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading in get_dataset %s.', key['name']) - res = self[key['name']] - res.attrs.update({'platform_name': self.platform_name, - 'sensor': self.sensor}) + logger.debug("Reading in get_dataset %s.", key["name"]) + res = self[key["name"]] + res.attrs.update({"platform_name": self.platform_name, + "sensor": self.sensor}) res.attrs.update(self.filename_info) # Add orbital parameters projection = self.nc["goes_imager_projection"] - res.attrs['orbital_parameters'] = { - 'projection_longitude': float(projection.attrs['longitude_of_projection_origin']), - 'projection_latitude': float(projection.attrs['latitude_of_projection_origin']), - 'projection_altitude': float(projection.attrs['perspective_point_height']), - 'satellite_nominal_latitude': float(self['nominal_satellite_subpoint_lat']), - 'satellite_nominal_longitude': float(self['nominal_satellite_subpoint_lon']), + res.attrs["orbital_parameters"] = { + "projection_longitude": float(projection.attrs["longitude_of_projection_origin"]), + "projection_latitude": float(projection.attrs["latitude_of_projection_origin"]), + "projection_altitude": float(projection.attrs["perspective_point_height"]), + "satellite_nominal_latitude": float(self["nominal_satellite_subpoint_lat"]), + "satellite_nominal_longitude": float(self["nominal_satellite_subpoint_lon"]), # 'satellite_nominal_altitude': float(self['nominal_satellite_height']), } @@ -93,25 +93,25 @@ def get_dataset(self, key, info): # remove attributes that could be confusing later if not self._is_category_product(res): - res.attrs.pop('_FillValue', None) - res.attrs.pop('scale_factor', None) - res.attrs.pop('add_offset', None) - res.attrs.pop('_Unsigned', None) - res.attrs.pop('ancillary_variables', None) # Can't currently load DQF + res.attrs.pop("_FillValue", None) + res.attrs.pop("scale_factor", None) + res.attrs.pop("add_offset", None) + res.attrs.pop("_Unsigned", None) + res.attrs.pop("ancillary_variables", None) # Can't currently load DQF # add in information from the filename that may be useful to the user # for key in ('observation_type', 'scene_abbr', 'scan_mode', 'platform_shortname'): - for attr in ('scene_abbr', 'scan_mode', 'platform_shortname'): + for attr in ("scene_abbr", "scan_mode", "platform_shortname"): res.attrs[attr] = self.filename_info[attr] # copy global attributes to metadata - for attr in ('scene_id', 'orbital_slot', 'instrument_ID', - 'production_site', 'timeline_ID', 'spatial_resolution'): + for attr in ("scene_id", "orbital_slot", "instrument_ID", + "production_site", "timeline_ID", "spatial_resolution"): res.attrs[attr] = self.nc.attrs.get(attr) return res def _is_2d_xy_var(self, data_arr): is_2d = data_arr.ndim == 2 - has_x_dim = 'x' in data_arr.dims - has_y_dim = 'y' in data_arr.dims + has_x_dim = "x" in data_arr.dims + has_y_dim = "y" in data_arr.dims return is_2d and has_x_dim and has_y_dim def available_datasets(self, configured_datasets=None): @@ -127,14 +127,14 @@ def available_datasets(self, 
configured_datasets=None): if is_avail is not None: yield is_avail, ds_info - matches = self.file_type_matches(ds_info['file_type']) - if matches and ds_info.get('resolution') != res: + matches = self.file_type_matches(ds_info["file_type"]) + if matches and ds_info.get("resolution") != res: # we are meant to handle this dataset (file type matches) # and the information we can provide isn't available yet new_info = ds_info.copy() - new_info['resolution'] = res - exists = ds_info['name'] in self.nc - handled_vars.add(ds_info['name']) + new_info["resolution"] = res + exists = ds_info["name"] in self.nc + handled_vars.add(ds_info["name"]) yield exists, new_info elif is_avail is None: # we don't know what to do with this @@ -150,9 +150,9 @@ def available_datasets(self, configured_datasets=None): continue new_info = { - 'name': var_name, - 'resolution': res, - 'file_type': self.filetype_info['file_type'] + "name": var_name, + "resolution": res, + "file_type": self.filetype_info["file_type"] } handled_vars.add(var_name) yield True, new_info diff --git a/satpy/readers/gms/gms5_vissr_format.py b/satpy/readers/gms/gms5_vissr_format.py index a5052097eb..a48fcde77f 100644 --- a/satpy/readers/gms/gms5_vissr_format.py +++ b/satpy/readers/gms/gms5_vissr_format.py @@ -32,329 +32,329 @@ CHANNELS = [("VIS", R4), ("IR1", R4), ("IR2", R4), ("WV", R4)] VISIR_SOLAR = [("VIS", R4), ("IR", R4)] -CONTROL_BLOCK = np.dtype([('control_block_size', I2), - ('head_block_number_of_parameter_block', I2), - ('parameter_block_size', I2), - ('head_block_number_of_image_data', I2), - ('total_block_size_of_image_data', I2), - ('available_block_size_of_image_data', I2), - ('head_valid_line_number', I2), - ('final_valid_line_number', I2), - ('final_data_block_number', I2)]) +CONTROL_BLOCK = np.dtype([("control_block_size", I2), + ("head_block_number_of_parameter_block", I2), + ("parameter_block_size", I2), + ("head_block_number_of_image_data", I2), + ("total_block_size_of_image_data", I2), + ("available_block_size_of_image_data", I2), + ("head_valid_line_number", I2), + ("final_valid_line_number", I2), + ("final_data_block_number", I2)]) -MODE_BLOCK_FRAME_PARAMETERS = [('bit_length', I4), - ('number_of_lines', I4), - ('number_of_pixels', I4), - ('stepping_angle', R4), - ('sampling_angle', R4), - ('lcw_pixel_size', I4), - ('doc_pixel_size', I4), - ('reserved', I4)] +MODE_BLOCK_FRAME_PARAMETERS = [("bit_length", I4), + ("number_of_lines", I4), + ("number_of_pixels", I4), + ("stepping_angle", R4), + ("sampling_angle", R4), + ("lcw_pixel_size", I4), + ("doc_pixel_size", I4), + ("reserved", I4)] -MODE_BLOCK = np.dtype([('satellite_number', I4), - ('satellite_name', '|S12'), - ('observation_time_ad', '|S16'), - ('observation_time_mjd', R8), - ('gms_operation_mode', I4), - ('dpc_operation_mode', I4), - ('vissr_observation_mode', I4), - ('scanner_selection', I4), - ('sensor_selection', I4), - ('sensor_mode', I4), - ('scan_frame_mode', I4), - ('scan_mode', I4), - ('upper_limit_of_scan_number', I4), - ('lower_limit_of_scan_number', I4), - ('equatorial_scan_line_number', I4), - ('spin_rate', R4), - ('vis_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS), - ('ir_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS), - ('satellite_height', R4), - ('earth_radius', R4), - ('ssp_longitude', R4), - ('reserved_1', I4, 9), - ('table_of_sensor_trouble', I4, 14), - ('reserved_2', I4, 36), - ('status_tables_of_data_relative_address_segment', I4, 60)]) +MODE_BLOCK = np.dtype([("satellite_number", I4), + ("satellite_name", "|S12"), + ("observation_time_ad", 
"|S16"), + ("observation_time_mjd", R8), + ("gms_operation_mode", I4), + ("dpc_operation_mode", I4), + ("vissr_observation_mode", I4), + ("scanner_selection", I4), + ("sensor_selection", I4), + ("sensor_mode", I4), + ("scan_frame_mode", I4), + ("scan_mode", I4), + ("upper_limit_of_scan_number", I4), + ("lower_limit_of_scan_number", I4), + ("equatorial_scan_line_number", I4), + ("spin_rate", R4), + ("vis_frame_parameters", MODE_BLOCK_FRAME_PARAMETERS), + ("ir_frame_parameters", MODE_BLOCK_FRAME_PARAMETERS), + ("satellite_height", R4), + ("earth_radius", R4), + ("ssp_longitude", R4), + ("reserved_1", I4, 9), + ("table_of_sensor_trouble", I4, 14), + ("reserved_2", I4, 36), + ("status_tables_of_data_relative_address_segment", I4, 60)]) COORDINATE_CONVERSION_PARAMETERS = np.dtype([ - ('data_segment', I4), - ('data_validity', I4), - ('data_generation_time', TIME), - ('scheduled_observation_time', R8), - ('stepping_angle_along_line', CHANNELS), - ('sampling_angle_along_pixel', CHANNELS), - ('central_line_number_of_vissr_frame', CHANNELS), - ('central_pixel_number_of_vissr_frame', CHANNELS), - ('pixel_difference_of_vissr_center_from_normal_position', CHANNELS), - ('number_of_sensor_elements', CHANNELS), - ('total_number_of_vissr_frame_lines', CHANNELS), - ('total_number_of_vissr_frame_pixels', CHANNELS), - ('vissr_misalignment', R4, (3,)), - ('matrix_of_misalignment', R4, (3, 3)), - ('parameters', [('judgement_of_observation_convergence_time', R4), - ('judgement_of_line_convergence', R4), - ('east_west_angle_of_sun_light_condense_prism', R4), - ('north_south_angle_of_sun_light_condense_prism', R4), - ('pi', R4), - ('pi_divided_by_180', R4), - ('180_divided_by_pi', R4), - ('equatorial_radius', R4), - ('oblateness_of_earth', R4), - ('eccentricity_of_earth_orbit', R4), - ('first_angle_of_vissr_observation_in_sdb', R4), - ('upper_limited_line_of_2nd_prism_for_vis_solar_observation', R4), - ('lower_limited_line_of_1st_prism_for_vis_solar_observation', R4), - ('upper_limited_line_of_3rd_prism_for_vis_solar_observation', R4), - ('lower_limited_line_of_2nd_prism_for_vis_solar_observation', R4)]), - ('solar_stepping_angle_along_line', VISIR_SOLAR), - ('solar_sampling_angle_along_pixel', VISIR_SOLAR), - ('solar_center_line_of_vissr_frame', VISIR_SOLAR), - ('solar_center_pixel_of_vissr_frame', VISIR_SOLAR), - ('solar_pixel_difference_of_vissr_center_from_normal_position', VISIR_SOLAR), - ('solar_number_of_sensor_elements', VISIR_SOLAR), - ('solar_total_number_of_vissr_frame_lines', VISIR_SOLAR), - ('solar_total_number_of_vissr_frame_pixels', VISIR_SOLAR), - ('reserved_1', I4, 19), - ('orbital_parameters', [('epoch_time', R8), - ('semi_major_axis', R8), - ('eccentricity', R8), - ('orbital_inclination', R8), - ('longitude_of_ascending_node', R8), - ('argument_of_perigee', R8), - ('mean_anomaly', R8), - ('longitude_of_ssp', R8), - ('latitude_of_ssp', R8)]), - ('reserved_2', I4, 2), - ('attitude_parameters', [('epoch_time', R8), - ('angle_between_z_axis_and_satellite_spin_axis_at_epoch_time', R8), - ('angle_change_rate_between_spin_axis_and_z_axis', R8), - ('angle_between_spin_axis_and_zy_axis', R8), - ('angle_change_rate_between_spin_axis_and_zt_axis', R8), - ('daily_mean_of_spin_rate', R8)]), - ('reserved_3', I4, 529), - ('correction_of_image_distortion', [('stepping_angle_along_line_of_ir1', R4), - ('stepping_angle_along_line_of_ir2', R4), - ('stepping_angle_along_line_of_wv', R4), - ('stepping_angle_along_line_of_vis', R4), - ('sampling_angle_along_pixel_of_ir1', R4), - ('sampling_angle_along_pixel_of_ir2', 
R4), - ('sampling_angle_along_pixel_of_wv', R4), - ('sampling_angle_along_pixel_of_vis', R4), - ('x_component_vissr_misalignment', R4), - ('y_component_vissr_misalignment', R4)]) + ("data_segment", I4), + ("data_validity", I4), + ("data_generation_time", TIME), + ("scheduled_observation_time", R8), + ("stepping_angle_along_line", CHANNELS), + ("sampling_angle_along_pixel", CHANNELS), + ("central_line_number_of_vissr_frame", CHANNELS), + ("central_pixel_number_of_vissr_frame", CHANNELS), + ("pixel_difference_of_vissr_center_from_normal_position", CHANNELS), + ("number_of_sensor_elements", CHANNELS), + ("total_number_of_vissr_frame_lines", CHANNELS), + ("total_number_of_vissr_frame_pixels", CHANNELS), + ("vissr_misalignment", R4, (3,)), + ("matrix_of_misalignment", R4, (3, 3)), + ("parameters", [("judgement_of_observation_convergence_time", R4), + ("judgement_of_line_convergence", R4), + ("east_west_angle_of_sun_light_condense_prism", R4), + ("north_south_angle_of_sun_light_condense_prism", R4), + ("pi", R4), + ("pi_divided_by_180", R4), + ("180_divided_by_pi", R4), + ("equatorial_radius", R4), + ("oblateness_of_earth", R4), + ("eccentricity_of_earth_orbit", R4), + ("first_angle_of_vissr_observation_in_sdb", R4), + ("upper_limited_line_of_2nd_prism_for_vis_solar_observation", R4), + ("lower_limited_line_of_1st_prism_for_vis_solar_observation", R4), + ("upper_limited_line_of_3rd_prism_for_vis_solar_observation", R4), + ("lower_limited_line_of_2nd_prism_for_vis_solar_observation", R4)]), + ("solar_stepping_angle_along_line", VISIR_SOLAR), + ("solar_sampling_angle_along_pixel", VISIR_SOLAR), + ("solar_center_line_of_vissr_frame", VISIR_SOLAR), + ("solar_center_pixel_of_vissr_frame", VISIR_SOLAR), + ("solar_pixel_difference_of_vissr_center_from_normal_position", VISIR_SOLAR), + ("solar_number_of_sensor_elements", VISIR_SOLAR), + ("solar_total_number_of_vissr_frame_lines", VISIR_SOLAR), + ("solar_total_number_of_vissr_frame_pixels", VISIR_SOLAR), + ("reserved_1", I4, 19), + ("orbital_parameters", [("epoch_time", R8), + ("semi_major_axis", R8), + ("eccentricity", R8), + ("orbital_inclination", R8), + ("longitude_of_ascending_node", R8), + ("argument_of_perigee", R8), + ("mean_anomaly", R8), + ("longitude_of_ssp", R8), + ("latitude_of_ssp", R8)]), + ("reserved_2", I4, 2), + ("attitude_parameters", [("epoch_time", R8), + ("angle_between_z_axis_and_satellite_spin_axis_at_epoch_time", R8), + ("angle_change_rate_between_spin_axis_and_z_axis", R8), + ("angle_between_spin_axis_and_zy_axis", R8), + ("angle_change_rate_between_spin_axis_and_zt_axis", R8), + ("daily_mean_of_spin_rate", R8)]), + ("reserved_3", I4, 529), + ("correction_of_image_distortion", [("stepping_angle_along_line_of_ir1", R4), + ("stepping_angle_along_line_of_ir2", R4), + ("stepping_angle_along_line_of_wv", R4), + ("stepping_angle_along_line_of_vis", R4), + ("sampling_angle_along_pixel_of_ir1", R4), + ("sampling_angle_along_pixel_of_ir2", R4), + ("sampling_angle_along_pixel_of_wv", R4), + ("sampling_angle_along_pixel_of_vis", R4), + ("x_component_vissr_misalignment", R4), + ("y_component_vissr_misalignment", R4)]) ]) -ATTITUDE_PREDICTION_DATA = np.dtype([('prediction_time_mjd', R8), - ('prediction_time_utc', TIME), - ('right_ascension_of_attitude', R8), - ('declination_of_attitude', R8), - ('sun_earth_angle', R8), - ('spin_rate', R8), - ('right_ascension_of_orbital_plane', R8), - ('declination_of_orbital_plane', R8), - ('reserved', R8), - ('eclipse_flag', I4), - ('spin_axis_flag', I4)]) +ATTITUDE_PREDICTION_DATA = 
np.dtype([("prediction_time_mjd", R8), + ("prediction_time_utc", TIME), + ("right_ascension_of_attitude", R8), + ("declination_of_attitude", R8), + ("sun_earth_angle", R8), + ("spin_rate", R8), + ("right_ascension_of_orbital_plane", R8), + ("declination_of_orbital_plane", R8), + ("reserved", R8), + ("eclipse_flag", I4), + ("spin_axis_flag", I4)]) -ATTITUDE_PREDICTION = np.dtype([('data_segment', I4), - ('data_validity', I4), - ('data_generation_time', TIME), - ('start_time', R8), - ('end_time', R8), - ('prediction_interval_time', R8), - ('number_of_prediction', I4), - ('data_size', I4), - ('data', ATTITUDE_PREDICTION_DATA, (33,))]) +ATTITUDE_PREDICTION = np.dtype([("data_segment", I4), + ("data_validity", I4), + ("data_generation_time", TIME), + ("start_time", R8), + ("end_time", R8), + ("prediction_interval_time", R8), + ("number_of_prediction", I4), + ("data_size", I4), + ("data", ATTITUDE_PREDICTION_DATA, (33,))]) -ORBIT_PREDICTION_DATA = [('prediction_time_mjd', R8), - ('prediction_time_utc', TIME), - ('satellite_position_1950', R8, (3,)), - ('satellite_velocity_1950', R8, (3,)), - ('satellite_position_earth_fixed', R8, (3,)), - ('satellite_velocity_earth_fixed', R8, (3,)), - ('greenwich_sidereal_time', R8), - ('sat_sun_vector_1950', [('azimuth', R8), - ('elevation', R8)]), - ('sat_sun_vector_earth_fixed', [('azimuth', R8), - ('elevation', R8)]), - ('conversion_matrix', R8, (3, 3)), - ('moon_directional_vector', R8, (3,)), - ('satellite_position', [('ssp_longitude', R8), - ('ssp_latitude', R8), - ('satellite_height', R8)]), - ('eclipse_period_flag', I4), - ('reserved', I4)] +ORBIT_PREDICTION_DATA = [("prediction_time_mjd", R8), + ("prediction_time_utc", TIME), + ("satellite_position_1950", R8, (3,)), + ("satellite_velocity_1950", R8, (3,)), + ("satellite_position_earth_fixed", R8, (3,)), + ("satellite_velocity_earth_fixed", R8, (3,)), + ("greenwich_sidereal_time", R8), + ("sat_sun_vector_1950", [("azimuth", R8), + ("elevation", R8)]), + ("sat_sun_vector_earth_fixed", [("azimuth", R8), + ("elevation", R8)]), + ("conversion_matrix", R8, (3, 3)), + ("moon_directional_vector", R8, (3,)), + ("satellite_position", [("ssp_longitude", R8), + ("ssp_latitude", R8), + ("satellite_height", R8)]), + ("eclipse_period_flag", I4), + ("reserved", I4)] -ORBIT_PREDICTION = np.dtype([('data_segment', I4), - ('data_validity', I4), - ('data_generation_time', TIME), - ('start_time', R8), - ('end_time', R8), - ('prediction_interval_time', R8), - ('number_of_prediction', I4), - ('data_size', I4), - ('data', ORBIT_PREDICTION_DATA, (9,))]) +ORBIT_PREDICTION = np.dtype([("data_segment", I4), + ("data_validity", I4), + ("data_generation_time", TIME), + ("start_time", R8), + ("end_time", R8), + ("prediction_interval_time", R8), + ("number_of_prediction", I4), + ("data_size", I4), + ("data", ORBIT_PREDICTION_DATA, (9,))]) VIS_CALIBRATION_TABLE = np.dtype([ - ('channel_number', I4), - ('data_validity', I4), - ('updated_time', TIME), - ('table_id', I4), - ('brightness_albedo_conversion_table', R4, (64,)), - ('vis_channel_staircase_brightness_data', R4, (6,)), - ('coefficients_table_of_vis_staircase_regression_curve', R4, (10,)), - ('brightness_table_for_calibration', [('universal_space_brightness', R4), - ('solar_brightness', R4)]), - ('calibration_uses_brightness_correspondence_voltage_chart', [('universal_space_voltage', R4), - ('solar_voltage', R4)]), - ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]), - ('reserved', I4, (9,)) + ("channel_number", I4), + ("data_validity", I4), + 
("updated_time", TIME), + ("table_id", I4), + ("brightness_albedo_conversion_table", R4, (64,)), + ("vis_channel_staircase_brightness_data", R4, (6,)), + ("coefficients_table_of_vis_staircase_regression_curve", R4, (10,)), + ("brightness_table_for_calibration", [("universal_space_brightness", R4), + ("solar_brightness", R4)]), + ("calibration_uses_brightness_correspondence_voltage_chart", [("universal_space_voltage", R4), + ("solar_voltage", R4)]), + ("calibration_coefficients_of_radiation_observation", [("G", R4), ("V0", R4)]), + ("reserved", I4, (9,)) ]) -VIS_CALIBRATION = np.dtype([('data_segment', I4), - ('data_validity', I4), - ('data_generation_time', TIME), - ('sensor_group', I4), - ('vis1_calibration_table', VIS_CALIBRATION_TABLE), - ('vis2_calibration_table', VIS_CALIBRATION_TABLE), - ('vis3_calibration_table', VIS_CALIBRATION_TABLE), - ('reserved', I4, (267,))]) +VIS_CALIBRATION = np.dtype([("data_segment", I4), + ("data_validity", I4), + ("data_generation_time", TIME), + ("sensor_group", I4), + ("vis1_calibration_table", VIS_CALIBRATION_TABLE), + ("vis2_calibration_table", VIS_CALIBRATION_TABLE), + ("vis3_calibration_table", VIS_CALIBRATION_TABLE), + ("reserved", I4, (267,))]) TELEMETRY_DATA = np.dtype([ - ('shutter_temp', R4), - ('redundant_mirror_temp', R4), - ('primary_mirror_temp', R4), - ('baffle_fw_temp', R4), - ('baffle_af_temp', R4), - ('15_volt_auxiliary_power_supply', R4), - ('radiative_cooler_temp_1', R4), - ('radiative_cooler_temp_2', R4), - ('electronics_module_temp', R4), - ('scan_mirror_temp', R4), - ('shutter_cavity_temp', R4), - ('primary_mirror_sealed_temp', R4), - ('redundant_mirror_sealed_temp', R4), - ('shutter_temp_2', R4), - ('reserved', R4, (2,)) + ("shutter_temp", R4), + ("redundant_mirror_temp", R4), + ("primary_mirror_temp", R4), + ("baffle_fw_temp", R4), + ("baffle_af_temp", R4), + ("15_volt_auxiliary_power_supply", R4), + ("radiative_cooler_temp_1", R4), + ("radiative_cooler_temp_2", R4), + ("electronics_module_temp", R4), + ("scan_mirror_temp", R4), + ("shutter_cavity_temp", R4), + ("primary_mirror_sealed_temp", R4), + ("redundant_mirror_sealed_temp", R4), + ("shutter_temp_2", R4), + ("reserved", R4, (2,)) ]) IR_CALIBRATION = np.dtype([ - ('data_segment', I4), - ('data_validity', I4), - ('updated_time', TIME), - ('sensor_group', I4), - ('table_id', I4), - ('reserved_1', I4, (2,)), - ('conversion_table_of_equivalent_black_body_radiation', R4, (256,)), - ('conversion_table_of_equivalent_black_body_temperature', R4, (256,)), - ('staircase_brightness_data', R4, (6,)), - ('coefficients_table_of_staircase_regression_curve', R4, (10,)), - ('brightness_data_for_calibration', [('brightness_of_space', R4), - ('brightness_of_black_body_shutter', R4), - ('reserved', R4)]), - ('voltage_table_for_brightness_of_calibration', [('voltage_of_space', R4), - ('voltage_of_black_body_shutter', R4), - ('reserved', R4)]), - ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]), - ('valid_shutter_temperature', R4), - ('valid_shutter_radiation', R4), - ('telemetry_data_table', TELEMETRY_DATA), - ('flag_of_calid_shutter_temperature_calculation', I4), - ('reserved_2', I4, (109,)) + ("data_segment", I4), + ("data_validity", I4), + ("updated_time", TIME), + ("sensor_group", I4), + ("table_id", I4), + ("reserved_1", I4, (2,)), + ("conversion_table_of_equivalent_black_body_radiation", R4, (256,)), + ("conversion_table_of_equivalent_black_body_temperature", R4, (256,)), + ("staircase_brightness_data", R4, (6,)), + 
("coefficients_table_of_staircase_regression_curve", R4, (10,)), + ("brightness_data_for_calibration", [("brightness_of_space", R4), + ("brightness_of_black_body_shutter", R4), + ("reserved", R4)]), + ("voltage_table_for_brightness_of_calibration", [("voltage_of_space", R4), + ("voltage_of_black_body_shutter", R4), + ("reserved", R4)]), + ("calibration_coefficients_of_radiation_observation", [("G", R4), ("V0", R4)]), + ("valid_shutter_temperature", R4), + ("valid_shutter_radiation", R4), + ("telemetry_data_table", TELEMETRY_DATA), + ("flag_of_calid_shutter_temperature_calculation", I4), + ("reserved_2", I4, (109,)) ]) SIMPLE_COORDINATE_CONVERSION_TABLE = np.dtype([ - ('coordinate_conversion_table', I2, (1250,)), - ('earth_equator_radius', R4), - ('satellite_height', R4), - ('stepping_angle', R4), - ('sampling_angle', R4), - ('ssp_latitude', R4), - ('ssp_longitude', R4), - ('ssp_line_number', R4), - ('ssp_pixel_number', R4), - ('pi', R4), - ('line_correction_ir1_vis', R4), - ('pixel_correction_ir1_vis', R4), - ('line_correction_ir1_ir2', R4), - ('pixel_correction_ir1_ir2', R4), - ('line_correction_ir1_wv', R4), - ('pixel_correction_ir1_wv', R4), - ('reserved', R4, (32,)), + ("coordinate_conversion_table", I2, (1250,)), + ("earth_equator_radius", R4), + ("satellite_height", R4), + ("stepping_angle", R4), + ("sampling_angle", R4), + ("ssp_latitude", R4), + ("ssp_longitude", R4), + ("ssp_line_number", R4), + ("ssp_pixel_number", R4), + ("pi", R4), + ("line_correction_ir1_vis", R4), + ("pixel_correction_ir1_vis", R4), + ("line_correction_ir1_ir2", R4), + ("pixel_correction_ir1_ir2", R4), + ("line_correction_ir1_wv", R4), + ("pixel_correction_ir1_wv", R4), + ("reserved", R4, (32,)), ]) IMAGE_PARAMS = { - 'mode': { - 'dtype': MODE_BLOCK, - 'offset': { + "mode": { + "dtype": MODE_BLOCK, + "offset": { VIS_CHANNEL: 2 * BLOCK_SIZE_VIS, IR_CHANNEL: 2 * BLOCK_SIZE_IR } }, - 'coordinate_conversion': { - 'dtype': COORDINATE_CONVERSION_PARAMETERS, - 'offset': { + "coordinate_conversion": { + "dtype": COORDINATE_CONVERSION_PARAMETERS, + "offset": { VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 4 * BLOCK_SIZE_IR } }, - 'attitude_prediction': { - 'dtype': ATTITUDE_PREDICTION, - 'offset': { + "attitude_prediction": { + "dtype": ATTITUDE_PREDICTION, + "offset": { VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 5 * BLOCK_SIZE_IR }, - 'preserve': 'data' + "preserve": "data" }, - 'orbit_prediction_1': { - 'dtype': ORBIT_PREDICTION, - 'offset': { + "orbit_prediction_1": { + "dtype": ORBIT_PREDICTION, + "offset": { VIS_CHANNEL: 3 * BLOCK_SIZE_VIS, IR_CHANNEL: 6 * BLOCK_SIZE_IR }, - 'preserve': 'data' + "preserve": "data" }, - 'orbit_prediction_2': { - 'dtype': ORBIT_PREDICTION, - 'offset': { + "orbit_prediction_2": { + "dtype": ORBIT_PREDICTION, + "offset": { VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 1 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 7 * BLOCK_SIZE_IR }, - 'preserve': 'data' + "preserve": "data" }, - 'vis_calibration': { - 'dtype': VIS_CALIBRATION, - 'offset': { + "vis_calibration": { + "dtype": VIS_CALIBRATION, + "offset": { VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 9 * BLOCK_SIZE_IR }, - 'preserve': 'data' + "preserve": "data" }, - 'ir1_calibration': { - 'dtype': IR_CALIBRATION, - 'offset': { + "ir1_calibration": { + "dtype": IR_CALIBRATION, + "offset": { VIS_CHANNEL: 4 * BLOCK_SIZE_VIS, IR_CHANNEL: 10 * BLOCK_SIZE_IR }, }, - 'ir2_calibration': { - 'dtype': IR_CALIBRATION, - 'offset': { + "ir2_calibration": { + "dtype": 
IR_CALIBRATION, + "offset": { VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 11 * BLOCK_SIZE_IR }, }, - 'wv_calibration': { - 'dtype': IR_CALIBRATION, - 'offset': { + "wv_calibration": { + "dtype": IR_CALIBRATION, + "offset": { VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 12 * BLOCK_SIZE_IR }, }, - 'simple_coordinate_conversion_table': { - 'dtype': SIMPLE_COORDINATE_CONVERSION_TABLE, - 'offset': { + "simple_coordinate_conversion_table": { + "dtype": SIMPLE_COORDINATE_CONVERSION_TABLE, + "offset": { VIS_CHANNEL: 5 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE, IR_CHANNEL: 16 * BLOCK_SIZE_IR }, @@ -362,36 +362,36 @@ } LINE_CONTROL_WORD = np.dtype([ - ('data_id', U1, (4, )), - ('line_number', I4), - ('line_name', I4), - ('error_line_flag', I4), - ('error_message', I4), - ('mode_error_flag', I4), - ('scan_time', R8), - ('beta_angle', R4), - ('west_side_earth_edge', I4), - ('east_side_earth_edge', I4), - ('received_time_1', R8), # Typo in format description (I*4) - ('received_time_2', I4), - ('reserved', U1, (8, )) + ("data_id", U1, (4, )), + ("line_number", I4), + ("line_name", I4), + ("error_line_flag", I4), + ("error_message", I4), + ("mode_error_flag", I4), + ("scan_time", R8), + ("beta_angle", R4), + ("west_side_earth_edge", I4), + ("east_side_earth_edge", I4), + ("received_time_1", R8), # Typo in format description (I*4) + ("received_time_2", I4), + ("reserved", U1, (8, )) ]) -IMAGE_DATA_BLOCK_IR = np.dtype([('LCW', LINE_CONTROL_WORD), - ('DOC', U1, (256,)), # Omitted - ('image_data', U1, 3344)]) +IMAGE_DATA_BLOCK_IR = np.dtype([("LCW", LINE_CONTROL_WORD), + ("DOC", U1, (256,)), # Omitted + ("image_data", U1, 3344)]) -IMAGE_DATA_BLOCK_VIS = np.dtype([('LCW', LINE_CONTROL_WORD), - ('DOC', U1, (64,)), # Omitted - ('image_data', U1, (13376,))]) +IMAGE_DATA_BLOCK_VIS = np.dtype([("LCW", LINE_CONTROL_WORD), + ("DOC", U1, (64,)), # Omitted + ("image_data", U1, (13376,))]) IMAGE_DATA = { VIS_CHANNEL: { - 'offset': 6 * BLOCK_SIZE_VIS, - 'dtype': IMAGE_DATA_BLOCK_VIS, + "offset": 6 * BLOCK_SIZE_VIS, + "dtype": IMAGE_DATA_BLOCK_VIS, }, IR_CHANNEL: { - 'offset': 18 * BLOCK_SIZE_IR, - 'dtype': IMAGE_DATA_BLOCK_IR + "offset": 18 * BLOCK_SIZE_IR, + "dtype": IMAGE_DATA_BLOCK_IR } } diff --git a/satpy/readers/gms/gms5_vissr_l1b.py b/satpy/readers/gms/gms5_vissr_l1b.py index f3c6898f65..c8a88dfe25 100644 --- a/satpy/readers/gms/gms5_vissr_l1b.py +++ b/satpy/readers/gms/gms5_vissr_l1b.py @@ -29,8 +29,8 @@ scene.load(["VIS", "IR1"]) -References -~~~~~~~~~~ +References: +~~~~~~~~~~~ Details about platform, instrument and data format can be found in the following references: diff --git a/satpy/readers/gms/gms5_vissr_navigation.py b/satpy/readers/gms/gms5_vissr_navigation.py index 8a811b2210..6335ce13b9 100644 --- a/satpy/readers/gms/gms5_vissr_navigation.py +++ b/satpy/readers/gms/gms5_vissr_navigation.py @@ -445,6 +445,7 @@ def get_lon_lat(pixel, nav_params): pixel (Pixel): Point in image coordinates. nav_params (PixelNavigationParameters): Navigation parameters for a single pixel. + Returns: Longitude and latitude in degrees. """ @@ -481,6 +482,7 @@ def transform_image_coords_to_scanning_angles(point, image_offset, scanning_angl point (Pixel): Point in image coordinates. image_offset (ImageOffset): Image offset. scanning_angles (ScanningAngles): Scanning angles. + Returns: Scanning angles (x, y) at the pixel center (rad). """ @@ -677,6 +679,7 @@ def intersect_with_earth(view_vector, sat_pos, ellipsoid): coordinates. 
sat_pos (Vector3D): Satellite position in earth-fixed coordinates. ellipsoid (EarthEllipsoid): Earth ellipsoid. + Returns: Intersection (Vector3D) with the earth's surface. """ diff --git a/satpy/readers/goes_imager_hrit.py b/satpy/readers/goes_imager_hrit.py index 05bcc513d7..457d5d809c 100644 --- a/satpy/readers/goes_imager_hrit.py +++ b/satpy/readers/goes_imager_hrit.py @@ -46,7 +46,7 @@ class CalibrationError(Exception): """Dummy error-class.""" -logger = logging.getLogger('hrit_goes') +logger = logging.getLogger("hrit_goes") # Geometric constants [meters] EQUATOR_RADIUS = 6378169.00 @@ -54,30 +54,30 @@ class CalibrationError(Exception): ALTITUDE = 35785831.00 # goes implementation: -key_header = np.dtype([('key_number', 'u1'), - ('seed', '>f8')]) - -segment_identification = np.dtype([('GP_SC_ID', '>i2'), - ('spectral_channel_id', '>i1'), - ('segment_sequence_number', '>u2'), - ('planned_start_segment_number', '>u2'), - ('planned_end_segment_number', '>u2'), - ('data_field_representation', '>i1')]) - -image_segment_line_quality = np.dtype([('line_number_in_grid', '>i4'), - ('line_mean_acquisition', - [('days', '>u2'), - ('milliseconds', '>u4')]), - ('line_validity', 'u1'), - ('line_radiometric_quality', 'u1'), - ('line_geometric_quality', 'u1')]) +key_header = np.dtype([("key_number", "u1"), + ("seed", ">f8")]) + +segment_identification = np.dtype([("GP_SC_ID", ">i2"), + ("spectral_channel_id", ">i1"), + ("segment_sequence_number", ">u2"), + ("planned_start_segment_number", ">u2"), + ("planned_end_segment_number", ">u2"), + ("data_field_representation", ">i1")]) + +image_segment_line_quality = np.dtype([("line_number_in_grid", ">i4"), + ("line_mean_acquisition", + [("days", ">u2"), + ("milliseconds", ">u4")]), + ("line_validity", "u1"), + ("line_radiometric_quality", "u1"), + ("line_geometric_quality", "u1")]) goms_variable_length_headers = { - image_segment_line_quality: 'image_segment_line_quality'} + image_segment_line_quality: "image_segment_line_quality"} -goms_text_headers = {image_data_function: 'image_data_function', - annotation_header: 'annotation_header', - ancillary_text: 'ancillary_text'} +goms_text_headers = {image_data_function: "image_data_function", + annotation_header: "annotation_header", + ancillary_text: "ancillary_text"} goes_hdr_map = base_hdr_map.copy() goes_hdr_map.update({7: key_header, @@ -86,53 +86,53 @@ class CalibrationError(Exception): }) -orbit_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('X', '>f8', (8, )), - ('Y', '>f8', (8, )), - ('Z', '>f8', (8, )), - ('VX', '>f8', (8, )), - ('VY', '>f8', (8, )), - ('VZ', '>f8', (8, ))]) +orbit_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("X", ">f8", (8, )), + ("Y", ">f8", (8, )), + ("Z", ">f8", (8, )), + ("VX", ">f8", (8, )), + ("VY", ">f8", (8, )), + ("VZ", ">f8", (8, ))]) -attitude_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('XofSpinAxis', '>f8', (8, )), - ('YofSpinAxis', '>f8', (8, )), - ('ZofSpinAxis', '>f8', (8, ))]) +attitude_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("XofSpinAxis", ">f8", (8, )), + ("YofSpinAxis", ">f8", (8, )), + ("ZofSpinAxis", ">f8", (8, ))]) -cuc_time = np.dtype([('coarse', 'u1', (4, )), - ('fine', 'u1', (3, ))]) +cuc_time = np.dtype([("coarse", "u1", (4, )), + ("fine", "u1", (3, ))]) -sgs_time = np.dtype([('century', 'u1'), - ('year', 'u1'), - ('doy1', 'u1'), - ('doy_hours', 'u1'), - ('hours_mins', 'u1'), - ('mins_secs', 'u1'), - 
('secs_msecs', 'u1'), - ('msecs', 'u1')]) +sgs_time = np.dtype([("century", "u1"), + ("year", "u1"), + ("doy1", "u1"), + ("doy_hours", "u1"), + ("hours_mins", "u1"), + ("mins_secs", "u1"), + ("secs_msecs", "u1"), + ("msecs", "u1")]) def make_sgs_time(sgs_time_array): """Make sgs time.""" - year = ((sgs_time_array['century'] >> 4) * 1000 + - (sgs_time_array['century'] & 15) * 100 + - (sgs_time_array['year'] >> 4) * 10 + - (sgs_time_array['year'] & 15)) - doy = ((sgs_time_array['doy1'] >> 4) * 100 + - (sgs_time_array['doy1'] & 15) * 10 + - (sgs_time_array['doy_hours'] >> 4)) - hours = ((sgs_time_array['doy_hours'] & 15) * 10 + - (sgs_time_array['hours_mins'] >> 4)) - mins = ((sgs_time_array['hours_mins'] & 15) * 10 + - (sgs_time_array['mins_secs'] >> 4)) - secs = ((sgs_time_array['mins_secs'] & 15) * 10 + - (sgs_time_array['secs_msecs'] >> 4)) - msecs = ((sgs_time_array['secs_msecs'] & 15) * 100 + - (sgs_time_array['msecs'] >> 4) * 10 + - (sgs_time_array['msecs'] & 15)) + year = ((sgs_time_array["century"] >> 4) * 1000 + + (sgs_time_array["century"] & 15) * 100 + + (sgs_time_array["year"] >> 4) * 10 + + (sgs_time_array["year"] & 15)) + doy = ((sgs_time_array["doy1"] >> 4) * 100 + + (sgs_time_array["doy1"] & 15) * 10 + + (sgs_time_array["doy_hours"] >> 4)) + hours = ((sgs_time_array["doy_hours"] & 15) * 10 + + (sgs_time_array["hours_mins"] >> 4)) + mins = ((sgs_time_array["hours_mins"] & 15) * 10 + + (sgs_time_array["mins_secs"] >> 4)) + secs = ((sgs_time_array["mins_secs"] & 15) * 10 + + (sgs_time_array["secs_msecs"] >> 4)) + msecs = ((sgs_time_array["secs_msecs"] & 15) * 100 + + (sgs_time_array["msecs"] >> 4) * 10 + + (sgs_time_array["msecs"] & 15)) return (datetime(int(year), 1, 1) + timedelta(days=int(doy - 1), hours=int(hours), @@ -156,7 +156,7 @@ def make_sgs_time(sgs_time_array): ("Cel", ">u2"), - ("RelativeScanCount", '>u2'), - ("AbsoluteScanCount", '>u2'), - ("NorthernmostScanLine", '>u2'), - ("WesternmostPixel", '>u2'), - ("EasternmostPixel", '>u2'), - ("NorthernmostFrameLine", '>u2'), - ("SouthernmostFrameLine", '>u2'), - ("0Pixel", '>u2'), - ("0ScanLine", '>u2'), - ("0Scan", '>u2'), - ("SubSatScan", '>u2'), - ("SubSatPixel", '>u2'), + ("RelativeScanCount", ">u2"), + ("AbsoluteScanCount", ">u2"), + ("NorthernmostScanLine", ">u2"), + ("WesternmostPixel", ">u2"), + ("EasternmostPixel", ">u2"), + ("NorthernmostFrameLine", ">u2"), + ("SouthernmostFrameLine", ">u2"), + ("0Pixel", ">u2"), + ("0ScanLine", ">u2"), + ("0Scan", ">u2"), + ("SubSatScan", ">u2"), + ("SubSatPixel", ">u2"), ("SubSatLatitude", gvar_float), ("SubSatLongitude", gvar_float), ("Junk4", "u1", 96), # move to "word" 295 @@ -250,7 +250,7 @@ def __init__(self, filename, filename_info, filetype_info): def read_prologue(self): """Read the prologue metadata.""" with open(self.filename, "rb") as fp_: - fp_.seek(self.mda['total_header_length']) + fp_.seek(self.mda["total_header_length"]) data = np.fromfile(fp_, dtype=prologue, count=1) self.prologue.update(recarray2dict(data)) @@ -258,16 +258,16 @@ def read_prologue(self): def process_prologue(self): """Reprocess prologue to correct types.""" - for key in ['TCurr', 'TCHED', 'TCTRL', 'TLHED', 'TLTRL', 'TIPFS', - 'TINFS', 'TISPC', 'TIECL', 'TIBBC', 'TISTR', 'TLRAN', - 'TIIRT', 'TIVIT', 'TCLMT', 'TIONA']: + for key in ["TCurr", "TCHED", "TCTRL", "TLHED", "TLTRL", "TIPFS", + "TINFS", "TISPC", "TIECL", "TIBBC", "TISTR", "TLRAN", + "TIIRT", "TIVIT", "TCLMT", "TIONA"]: try: self.prologue[key] = make_sgs_time(self.prologue[key]) except ValueError: self.prologue.pop(key, None) logger.debug("Invalid data for %s", key) - 
for key in ['SubSatLatitude', "SubSatLongitude", "ReferenceLongitude", + for key in ["SubSatLatitude", "SubSatLongitude", "ReferenceLongitude", "ReferenceDistance", "ReferenceLatitude"]: self.prologue[key] = make_gvar_float(self.prologue[key]) @@ -352,7 +352,7 @@ def process_prologue(self): 14: "GOES-14", 15: "GOES-15"} -SENSOR_NAME = 'goes_imager' +SENSOR_NAME = "goes_imager" class HRITGOESFileHandler(HRITFileHandler): @@ -367,12 +367,12 @@ def __init__(self, filename, filename_info, filetype_info, goms_variable_length_headers, goms_text_headers)) self.prologue = prologue.prologue - self.chid = self.mda['spectral_channel_id'] + self.chid = self.mda["spectral_channel_id"] - sublon = self.prologue['SubSatLongitude'] - self.mda['projection_parameters']['SSP_longitude'] = sublon + sublon = self.prologue["SubSatLongitude"] + self.mda["projection_parameters"]["SSP_longitude"] = sublon - satellite_id = self.prologue['SatelliteID'] + satellite_id = self.prologue["SatelliteID"] self.platform_name = SPACECRAFTS[satellite_id] def get_dataset(self, key, info): @@ -380,17 +380,17 @@ def get_dataset(self, key, info): logger.debug("Getting raw data") res = super(HRITGOESFileHandler, self).get_dataset(key, info) - self.mda['calibration_parameters'] = self._get_calibration_params() + self.mda["calibration_parameters"] = self._get_calibration_params() - res = self.calibrate(res, key['calibration']) + res = self.calibrate(res, key["calibration"]) new_attrs = info.copy() new_attrs.update(res.attrs) res.attrs = new_attrs - res.attrs['platform_name'] = self.platform_name - res.attrs['sensor'] = SENSOR_NAME - res.attrs['orbital_parameters'] = {'projection_longitude': self.mda['projection_parameters']['SSP_longitude'], - 'projection_latitude': 0.0, - 'projection_altitude': ALTITUDE} + res.attrs["platform_name"] = self.platform_name + res.attrs["sensor"] = SENSOR_NAME + res.attrs["orbital_parameters"] = {"projection_longitude": self.mda["projection_parameters"]["SSP_longitude"], + "projection_latitude": 0.0, + "projection_altitude": ALTITUDE} return res def _get_calibration_params(self): @@ -398,9 +398,9 @@ def _get_calibration_params(self): params = {} idx_table = [] val_table = [] - for elt in self.mda['image_data_function'].split(b'\r\n'): + for elt in self.mda["image_data_function"].split(b"\r\n"): try: - key, val = elt.split(b':=') + key, val = elt.split(b":=") try: idx_table.append(int(key)) val_table.append(float(val)) @@ -408,19 +408,19 @@ def _get_calibration_params(self): params[key] = val except ValueError: pass - params['indices'] = np.array(idx_table) - params['values'] = np.array(val_table, dtype=np.float32) + params["indices"] = np.array(idx_table) + params["values"] = np.array(val_table, dtype=np.float32) return params def calibrate(self, data, calibration): """Calibrate the data.""" logger.debug("Calibration") tic = datetime.now() - if calibration == 'counts': + if calibration == "counts": return data - if calibration == 'reflectance': + if calibration == "reflectance": res = self._calibrate(data) - elif calibration == 'brightness_temperature': + elif calibration == "brightness_temperature": res = self._calibrate(data) else: raise NotImplementedError("Don't know how to calibrate to " + @@ -431,17 +431,17 @@ def calibrate(self, data, calibration): def _calibrate(self, data): """Calibrate *data*.""" - idx = self.mda['calibration_parameters']['indices'] - val = self.mda['calibration_parameters']['values'] + idx = self.mda["calibration_parameters"]["indices"] + val = 
self.mda["calibration_parameters"]["values"] data.data = da.where(data.data == 0, np.nan, data.data) ddata = data.data.map_blocks(np.interp, idx, val, dtype=val.dtype) res = xr.DataArray(ddata, dims=data.dims, attrs=data.attrs, coords=data.coords) res = res.clip(min=0) - units = {b'percent': '%', b'degree Kelvin': 'K'} - unit = self.mda['calibration_parameters'][b'_UNIT'] - res.attrs['units'] = units.get(unit, unit) + units = {b"percent": "%", b"degree Kelvin": "K"} + unit = self.mda["calibration_parameters"][b"_UNIT"] + res.attrs["units"] = units.get(unit, unit) return res def get_area_def(self, dataset_id): @@ -453,32 +453,32 @@ def get_area_def(self, dataset_id): return area def _get_proj_dict(self, dataset_id): - loff = np.float32(self.mda['loff']) - nlines = np.int32(self.mda['number_of_lines']) + loff = np.float32(self.mda["loff"]) + nlines = np.int32(self.mda["number_of_lines"]) loff = nlines - loff name_dict = get_geos_area_naming({ - 'platform_name': self.platform_name, - 'instrument_name': SENSOR_NAME, + "platform_name": self.platform_name, + "instrument_name": SENSOR_NAME, # Partial scans are padded to full disk - 'service_name': 'FD', - 'service_desc': 'Full Disk', - 'resolution': dataset_id['resolution'] + "service_name": "FD", + "service_desc": "Full Disk", + "resolution": dataset_id["resolution"] }) return { - 'a': EQUATOR_RADIUS, - 'b': POLE_RADIUS, - 'ssp_lon': float(self.prologue['SubSatLongitude']), - 'h': ALTITUDE, - 'proj': 'geos', - 'units': 'm', - 'a_name': name_dict['area_id'], - 'a_desc': name_dict['description'], - 'p_id': '', - 'nlines': nlines, - 'ncols': np.int32(self.mda['number_of_columns']), - 'cfac': np.int32(self.mda['cfac']), - 'lfac': np.int32(self.mda['lfac']), - 'coff': np.float32(self.mda['coff']), - 'loff': loff, - 'scandir': 'N2S' + "a": EQUATOR_RADIUS, + "b": POLE_RADIUS, + "ssp_lon": float(self.prologue["SubSatLongitude"]), + "h": ALTITUDE, + "proj": "geos", + "units": "m", + "a_name": name_dict["area_id"], + "a_desc": name_dict["description"], + "p_id": "", + "nlines": nlines, + "ncols": np.int32(self.mda["number_of_columns"]), + "cfac": np.int32(self.mda["cfac"]), + "lfac": np.int32(self.mda["lfac"]), + "coff": np.float32(self.mda["coff"]), + "loff": loff, + "scandir": "N2S" } diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 68932531a8..214852fffd 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -200,8 +200,8 @@ 6. 
Data is received via EumetCast -References -========== +References: +=========== - `[GVAR]`_ GVAR transmission format - `[BOOK-N]`_ GOES-N databook @@ -278,287 +278,287 @@ SCALE_13_3 = 5.5297 OFFSET_13_3 = 16.5892 CALIB_COEFS = { - 'GOES-15': {'00_7': {'slope': [5.851966E-1, 5.879772E-1, 5.856793E-1, + "GOES-15": {"00_7": {"slope": [5.851966E-1, 5.879772E-1, 5.856793E-1, 5.854250E-1, 5.866992E-1, 5.836241E-1, 5.846555E-1, 5.843753E-1], - 'offset': [-16.9707, -17.0513, -16.9847, -16.9773, + "offset": [-16.9707, -17.0513, -16.9847, -16.9773, -17.0143, -16.9251, -16.9550, -16.9469], - 'x0': 29, - 'k': 1.88852E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2562.7905, 2562.7905], - 'a': [-1.5693377, -1.5693377], - 'b': [1.0025034, 1.0025034], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_5': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1521.1988, 1521.5277], - 'a': [-3.4706545, -3.4755568], - 'b': [1.0093296, 1.0092838], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [935.89417, 935.78158], - 'a': [-0.36151367, -0.35316361], - 'b': [1.0012715, 1.0012570], - 'btmin': 180.0, - 'btmax': 340.0}, - '13_3': {'scale': SCALE_13_3, - 'offset': OFFSET_13_3, - 'n': [753.72229, 753.93403], - 'a': [-0.21475817, -0.24630068], - 'b': [1.0006485, 1.0007178], - 'btmin': 180.0, - 'btmax': 340.0} + "x0": 29, + "k": 1.88852E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2562.7905, 2562.7905], + "a": [-1.5693377, -1.5693377], + "b": [1.0025034, 1.0025034], + "btmin": 205.0, + "btmax": 340.0}, + "06_5": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1521.1988, 1521.5277], + "a": [-3.4706545, -3.4755568], + "b": [1.0093296, 1.0092838], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [935.89417, 935.78158], + "a": [-0.36151367, -0.35316361], + "b": [1.0012715, 1.0012570], + "btmin": 180.0, + "btmax": 340.0}, + "13_3": {"scale": SCALE_13_3, + "offset": OFFSET_13_3, + "n": [753.72229, 753.93403], + "a": [-0.21475817, -0.24630068], + "b": [1.0006485, 1.0007178], + "btmin": 180.0, + "btmax": 340.0} }, # ITT RevH + STAR Correction - 'GOES-14': {'00_7': {'slope': [5.874693E-1, 5.865367E-1, 5.862807E-1, + "GOES-14": {"00_7": {"slope": [5.874693E-1, 5.865367E-1, 5.862807E-1, 5.864086E-1, 5.857146E-1, 5.852004E-1, 5.860814E-1, 5.841697E-1], - 'offset': [-17.037, -17.010, -17.002, -17.006, + "offset": [-17.037, -17.010, -17.002, -17.006, -16.986, -16.971, -16.996, -16.941], - 'x0': 29, - 'k': 1.88772E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2577.3518, 2577.3518], - 'a': [-1.5297091, -1.5297091], - 'b': [1.0025608, 1.0025608], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_5': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1519.3488, 1518.5610], - 'a': [-3.4647892, -3.4390527], - 'b': [1.0093656, 1.0094427], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [933.98541, 934.19579], - 'a': [-0.29201763, -0.31824779], - 'b': [1.0012018, 1.0012303], - 'btmin': 180.0, - 'btmax': 340.0}, - '13_3': {'scale': SCALE_13_3, - 'offset': OFFSET_13_3, - 'n': [752.88143, 752.82392], - 'a': [-0.22508805, -0.21700982], - 'b': [1.0006686, 1.0006503], - 'btmin': 180.0, - 'btmax': 340.0} + "x0": 29, + "k": 1.88772E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2577.3518, 2577.3518], + "a": [-1.5297091, -1.5297091], + "b": [1.0025608, 1.0025608], + "btmin": 205.0, + "btmax": 
340.0}, + "06_5": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1519.3488, 1518.5610], + "a": [-3.4647892, -3.4390527], + "b": [1.0093656, 1.0094427], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [933.98541, 934.19579], + "a": [-0.29201763, -0.31824779], + "b": [1.0012018, 1.0012303], + "btmin": 180.0, + "btmax": 340.0}, + "13_3": {"scale": SCALE_13_3, + "offset": OFFSET_13_3, + "n": [752.88143, 752.82392], + "a": [-0.22508805, -0.21700982], + "b": [1.0006686, 1.0006503], + "btmin": 180.0, + "btmax": 340.0} }, # ITT RevH + STAR Correction - 'GOES-13': {'00_7': {'slope': [6.120196E-1, 6.118504E-1, 6.096360E-1, + "GOES-13": {"00_7": {"slope": [6.120196E-1, 6.118504E-1, 6.096360E-1, 6.087055E-1, 6.132860E-1, 6.118208E-1, 6.122307E-1, 6.066968E-1], - 'offset': [-17.749, -17.744, -17.769, -17.653, + "offset": [-17.749, -17.744, -17.769, -17.653, -17.785, -17.743, -17.755, -17.594], - 'x0': 29, - 'k': 1.89544E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2561.74, 2561.74], - 'a': [-1.437204, -1.437204], - 'b': [1.002562, 1.002562], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_5': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1522.52, 1521.66], - 'a': [-3.625663, -3.607841], - 'b': [1.010018, 1.010010], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [937.23, 937.27], - 'a': [-0.386043, -0.380113], - 'b': [1.001298, 1.001285], - 'btmin': 180.0, - 'btmax': 340.0}, - '13_3': {'scale': SCALE_13_3, - 'offset': OFFSET_13_3, - 'n': [749.83], - 'a': [-0.134801], - 'b': [1.000482], - 'btmin': 180.0, - 'btmax': 340.0} # Has only one detector on GOES-13 + "x0": 29, + "k": 1.89544E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2561.74, 2561.74], + "a": [-1.437204, -1.437204], + "b": [1.002562, 1.002562], + "btmin": 205.0, + "btmax": 340.0}, + "06_5": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1522.52, 1521.66], + "a": [-3.625663, -3.607841], + "b": [1.010018, 1.010010], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [937.23, 937.27], + "a": [-0.386043, -0.380113], + "b": [1.001298, 1.001285], + "btmin": 180.0, + "btmax": 340.0}, + "13_3": {"scale": SCALE_13_3, + "offset": OFFSET_13_3, + "n": [749.83], + "a": [-0.134801], + "b": [1.000482], + "btmin": 180.0, + "btmax": 340.0} # Has only one detector on GOES-13 }, - 'GOES-12': {'00_7': {'slope': [5.771030E-1, 5.761764E-1, 5.775825E-1, + "GOES-12": {"00_7": {"slope": [5.771030E-1, 5.761764E-1, 5.775825E-1, 5.790699E-1, 5.787051E-1, 5.755969E-1, 5.753973E-1, 5.752099E-1], - 'offset': [-16.736, -16.709, -16.750, -16.793, + "offset": [-16.736, -16.709, -16.750, -16.793, -16.782, -16.692, -16.687, -16.681], - 'x0': 29, - 'k': 1.97658E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2562.45, 2562.45], - 'a': [-0.650731, -0.650731], - 'b': [1.001520, 1.001520], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_5': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1536.43, 1536.94], - 'a': [-4.764728, -4.775517], - 'b': [1.012420, 1.012403], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [933.21, 933.21], - 'a': [-0.360331, -0.360331], - 'b': [1.001306, 1.001306], - 'btmin': 180.0, - 'btmax': 340.0}, - '13_3': {'scale': SCALE_13_3, - 'offset': OFFSET_13_3, - 'n': [751.91], - 'a': [-0.253449], - 'b': [1.000743], - 'btmin': 180.0, - 'btmax': 340.0} # Has only one 
detector on GOES-12 + "x0": 29, + "k": 1.97658E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2562.45, 2562.45], + "a": [-0.650731, -0.650731], + "b": [1.001520, 1.001520], + "btmin": 205.0, + "btmax": 340.0}, + "06_5": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1536.43, 1536.94], + "a": [-4.764728, -4.775517], + "b": [1.012420, 1.012403], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [933.21, 933.21], + "a": [-0.360331, -0.360331], + "b": [1.001306, 1.001306], + "btmin": 180.0, + "btmax": 340.0}, + "13_3": {"scale": SCALE_13_3, + "offset": OFFSET_13_3, + "n": [751.91], + "a": [-0.253449], + "b": [1.000743], + "btmin": 180.0, + "btmax": 340.0} # Has only one detector on GOES-12 }, - 'GOES-11': {'00_7': {'slope': [5.561568E-1, 5.552979E-1, 5.558981E-1, + "GOES-11": {"00_7": {"slope": [5.561568E-1, 5.552979E-1, 5.558981E-1, 5.577627E-1, 5.557238E-1, 5.587978E-1, 5.586530E-1, 5.528971E-1], - 'offset': [-16.129, -16.104, -16.121, -16.175, + "offset": [-16.129, -16.104, -16.121, -16.175, -16.116, -16.205, -16.201, -16.034], - 'x0': 29, - 'k': 2.01524E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2562.07, 2562.07], - 'a': [-0.644790, -0.644790], - 'b': [1.000775, 1.000775], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_8': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1481.53], - 'a': [-0.543401], - 'b': [1.001495], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [931.76, 931.76], - 'a': [-0.306809, -0.306809], - 'b': [1.001274, 1.001274], - 'btmin': 180.0, - 'btmax': 340.0}, - '12_0': {'scale': SCALE_12_0, - 'offset': OFFSET_12_0, - 'n': [833.67, 833.04], - 'a': [-0.333216, -0.315110], - 'b': [1.001000, 1.000967], - 'btmin': 180.0, - 'btmax': 340.0} + "x0": 29, + "k": 2.01524E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2562.07, 2562.07], + "a": [-0.644790, -0.644790], + "b": [1.000775, 1.000775], + "btmin": 205.0, + "btmax": 340.0}, + "06_8": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1481.53], + "a": [-0.543401], + "b": [1.001495], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [931.76, 931.76], + "a": [-0.306809, -0.306809], + "b": [1.001274, 1.001274], + "btmin": 180.0, + "btmax": 340.0}, + "12_0": {"scale": SCALE_12_0, + "offset": OFFSET_12_0, + "n": [833.67, 833.04], + "a": [-0.333216, -0.315110], + "b": [1.001000, 1.000967], + "btmin": 180.0, + "btmax": 340.0} }, - 'GOES-10': {'00_7': {'slope': [5.605602E-1, 5.563529E-1, 5.566574E-1, + "GOES-10": {"00_7": {"slope": [5.605602E-1, 5.563529E-1, 5.566574E-1, 5.582154E-1, 5.583361E-1, 5.571736E-1, 5.563135E-1, 5.613536E-1], - 'offset': [-16.256, -16.134, -16.143, -16.188, + "offset": [-16.256, -16.134, -16.143, -16.188, -16.192, -16.158, -16.133, -16.279], - 'x0': 29, - 'k': 1.98808E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2552.9845, 2552.9845], - 'a': [-0.60584483, -0.60584483], - 'b': [1.0011017, 1.0011017], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_8': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1486.2212], - 'a': [-0.61653805], - 'b': [1.0014011], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [936.10260, 935.98981], - 'a': [-0.27128884, -0.27064036], - 'b': [1.0009674, 1.0009687], - 'btmin': 180.0, - 'btmax': 340.0}, - '12_0': {'scale': SCALE_12_0, - 'offset': OFFSET_12_0, - 'n': 
[830.88473, 830.89691], - 'a': [-0.26505411, -0.26056452], - 'b': [1.0009087, 1.0008962], - 'btmin': 180.0, - 'btmax': 340.0} + "x0": 29, + "k": 1.98808E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2552.9845, 2552.9845], + "a": [-0.60584483, -0.60584483], + "b": [1.0011017, 1.0011017], + "btmin": 205.0, + "btmax": 340.0}, + "06_8": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1486.2212], + "a": [-0.61653805], + "b": [1.0014011], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [936.10260, 935.98981], + "a": [-0.27128884, -0.27064036], + "b": [1.0009674, 1.0009687], + "btmin": 180.0, + "btmax": 340.0}, + "12_0": {"scale": SCALE_12_0, + "offset": OFFSET_12_0, + "n": [830.88473, 830.89691], + "a": [-0.26505411, -0.26056452], + "b": [1.0009087, 1.0008962], + "btmin": 180.0, + "btmax": 340.0} }, - 'GOES-9': {'00_7': {'slope': [0.5492361], - 'offset': [-15.928], - 'x0': 29, - 'k': 1.94180E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2555.18, 2555.18], - 'a': [-0.579908, -0.579908], - 'b': [1.000942, 1.000942], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_8': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1481.82], - 'a': [-0.493016], - 'b': [1.001076], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [934.59, 934.28], - 'a': [-0.384798, -0.363703], - 'b': [1.001293, 1.001272], - 'btmin': 180.0, - 'btmax': 340.0}, - '12_0': {'scale': SCALE_12_0, - 'offset': OFFSET_12_0, - 'n': [834.02, 834.09], - 'a': [-0.302995, -0.306838], - 'b': [1.000941, 1.000948], - 'btmin': 180.0, - 'btmax': 340.0} + "GOES-9": {"00_7": {"slope": [0.5492361], + "offset": [-15.928], + "x0": 29, + "k": 1.94180E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2555.18, 2555.18], + "a": [-0.579908, -0.579908], + "b": [1.000942, 1.000942], + "btmin": 205.0, + "btmax": 340.0}, + "06_8": {"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1481.82], + "a": [-0.493016], + "b": [1.001076], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [934.59, 934.28], + "a": [-0.384798, -0.363703], + "b": [1.001293, 1.001272], + "btmin": 180.0, + "btmax": 340.0}, + "12_0": {"scale": SCALE_12_0, + "offset": OFFSET_12_0, + "n": [834.02, 834.09], + "a": [-0.302995, -0.306838], + "b": [1.000941, 1.000948], + "btmin": 180.0, + "btmax": 340.0} }, - 'GOES-8': {'00_7': {'slope': [0.5501873], - 'offset': [-15.955], - 'x0': 29, - 'k': 1.92979E-3}, - '03_9': {'scale': SCALE_03_9, - 'offset': OFFSET_03_9, - 'n': [2556.71, 2558.62], - 'a': [-0.578526, -0.581853], - 'b': [1.001512, 1.001532], - 'btmin': 205.0, - 'btmax': 340.0}, - '06_8': {'scale': SCALE_06_8, - 'offset': OFFSET_06_8, - 'n': [1481.91], - 'a': [-0.593903], - 'b': [1.001418], - 'btmin': 180.0, - 'btmax': 340.0}, - '10_7': {'scale': SCALE_10_7, - 'offset': OFFSET_10_7, - 'n': [934.30, 935.38], - 'a': [-0.322585, -0.351889], - 'b': [1.001271, 1.001293], - 'btmin': 180.0, - 'btmax': 340.0}, - '12_0': {'scale': SCALE_12_0, - 'offset': OFFSET_12_0, - 'n': [837.06, 837.00], - 'a': [-0.422571, -0.466954], - 'b': [1.001170, 1.001257], - 'btmin': 180.0, - 'btmax': 340.0} + "GOES-8": {"00_7": {"slope": [0.5501873], + "offset": [-15.955], + "x0": 29, + "k": 1.92979E-3}, + "03_9": {"scale": SCALE_03_9, + "offset": OFFSET_03_9, + "n": [2556.71, 2558.62], + "a": [-0.578526, -0.581853], + "b": [1.001512, 1.001532], + "btmin": 205.0, + "btmax": 340.0}, + "06_8": 
{"scale": SCALE_06_8, + "offset": OFFSET_06_8, + "n": [1481.91], + "a": [-0.593903], + "b": [1.001418], + "btmin": 180.0, + "btmax": 340.0}, + "10_7": {"scale": SCALE_10_7, + "offset": OFFSET_10_7, + "n": [934.30, 935.38], + "a": [-0.322585, -0.351889], + "b": [1.001271, 1.001293], + "btmin": 180.0, + "btmax": 340.0}, + "12_0": {"scale": SCALE_12_0, + "offset": OFFSET_12_0, + "n": [837.06, 837.00], + "a": [-0.422571, -0.466954], + "b": [1.001170, 1.001257], + "btmin": 180.0, + "btmax": 340.0} } } @@ -569,12 +569,12 @@ SAMPLING_NS_IR = 112E-6 # Sector definitions. TODO: Add remaining sectors (PACUS, CONUS, ...) -FULL_DISC = 'Full Disc' -NORTH_HEMIS_EAST = 'Northern Hemisphere (GOES-East)' -SOUTH_HEMIS_EAST = 'Southern Hemisphere (GOES-East)' -NORTH_HEMIS_WEST = 'Northern Hemisphere (GOES-West)' -SOUTH_HEMIS_WEST = 'Southern Hemisphere (GOES-West)' -UNKNOWN_SECTOR = 'Unknown' +FULL_DISC = "Full Disc" +NORTH_HEMIS_EAST = "Northern Hemisphere (GOES-East)" +SOUTH_HEMIS_EAST = "Southern Hemisphere (GOES-East)" +NORTH_HEMIS_WEST = "Northern Hemisphere (GOES-West)" +SOUTH_HEMIS_WEST = "Southern Hemisphere (GOES-West)" +UNKNOWN_SECTOR = "Unknown" IR_SECTORS = { (2704, 5208): FULL_DISC, @@ -613,14 +613,14 @@ def __init__(self, filename, filename_info, filetype_info, geo_data=None): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, - chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) - self.sensor = 'goes_imager' - self.nlines = self.nc.dims['yc'] - self.ncols = self.nc.dims['xc'] + chunks={"xc": CHUNK_SIZE, "yc": CHUNK_SIZE}) + self.sensor = "goes_imager" + self.nlines = self.nc.dims["yc"] + self.ncols = self.nc.dims["xc"] self.platform_name = self._get_platform_name( - self.nc.attrs['Satellite Sensor']) - self.platform_shortname = self.platform_name.replace('-', '').lower() - self.gvar_channel = int(self.nc['bands'].values) + self.nc.attrs["Satellite Sensor"]) + self.platform_shortname = self.platform_name.replace("-", "").lower() + self.gvar_channel = int(self.nc["bands"].values) self.sector = self._get_sector(channel=self.gvar_channel, nlines=self.nlines, ncols=self.ncols) @@ -652,7 +652,7 @@ def ir_sectors(self): @staticmethod def _get_platform_name(ncattr): """Determine name of the platform.""" - match = re.match(r'G-(\d+)', ncattr) + match = re.match(r"G-(\d+)", ncattr) if match: return SPACECRAFTS.get(int(match.groups()[0])) @@ -681,7 +681,7 @@ def _get_earth_mask(lat): Returns: Mask (1=earth, 0=space) """ - logger.debug('Computing earth mask') + logger.debug("Computing earth mask") return np.fabs(lat) <= 90 @staticmethod @@ -695,7 +695,7 @@ def _get_nadir_pixel(earth_mask, sector): nadir row, nadir column """ if sector == FULL_DISC: - logger.debug('Computing nadir pixel') + logger.debug("Computing nadir pixel") # The earth is not centered in the image, compute bounding box # of the earth disc first @@ -711,7 +711,7 @@ def _get_nadir_pixel(earth_mask, sector): def _is_yaw_flip(self, lat): """Determine whether the satellite is yaw-flipped ('upside down').""" - logger.debug('Computing yaw flip flag') + logger.debug("Computing yaw flip flag") # In case of yaw-flip the data and coordinates in the netCDF files are # also flipped. Just check whether the latitude increases or decrases # with the line number. 
@@ -721,7 +721,7 @@ def _is_yaw_flip(self, lat): def _get_area_def_uniform_sampling(self, lon0, channel): """Get area definition with uniform sampling.""" - logger.debug('Computing area definition') + logger.debug("Computing area definition") if lon0 is not None: est = AreaDefEstimator(self.platform_name, channel) return est.get_area_def_with_uniform_sampling(lon0) @@ -730,7 +730,7 @@ def _get_area_def_uniform_sampling(self, lon0, channel): @property def start_time(self): """Start timestamp of the dataset.""" - dt = self.nc['time'].dt + dt = self.nc["time"].dt return datetime(year=int(dt.year), month=int(dt.month), day=int(dt.day), hour=int(dt.hour), minute=int(dt.minute), second=int(dt.second), microsecond=int(dt.microsecond)) @@ -757,7 +757,7 @@ def resolution(self): Returns: Spatial resolution in kilometers """ - return 1000. * self.nc['lineRes'].values + return 1000. * self.nc["lineRes"].values def get_shape(self, key, info): """Get the shape of the data. @@ -772,7 +772,7 @@ def meta(self): """Derive metadata from the coordinates.""" # Use buffered data if available if self._meta is None: - lat = self.geo_data['lat'] + lat = self.geo_data["lat"] earth_mask = self._get_earth_mask(lat) crow, ccol = self._get_nadir_pixel(earth_mask=earth_mask, sector=self.sector) @@ -780,55 +780,55 @@ def meta(self): yaw_flip = self._is_yaw_flip(lat) del lat - lon = self.geo_data['lon'] + lon = self.geo_data["lon"] lon0 = lon.values[crow, ccol] if crow is not None else None area_def_uni = self._get_area_def_uniform_sampling( lon0=lon0, channel=self.gvar_channel) del lon - self._meta = {'earth_mask': earth_mask, - 'yaw_flip': yaw_flip, - 'lat0': lat0, - 'lon0': lon0, - 'nadir_row': crow, - 'nadir_col': ccol, - 'area_def_uni': area_def_uni} + self._meta = {"earth_mask": earth_mask, + "yaw_flip": yaw_flip, + "lat0": lat0, + "lon0": lon0, + "nadir_row": crow, + "nadir_col": ccol, + "area_def_uni": area_def_uni} return self._meta def _counts2radiance(self, counts, coefs, channel): """Convert raw detector counts to radiance.""" - logger.debug('Converting counts to radiance') + logger.debug("Converting counts to radiance") if is_vis_channel(channel): # Since the scanline-detector assignment is unknown, use the average # coefficients for all scanlines. 
- slope = np.array(coefs['slope']).mean() - offset = np.array(coefs['offset']).mean() + slope = np.array(coefs["slope"]).mean() + offset = np.array(coefs["offset"]).mean() return self._viscounts2radiance(counts=counts, slope=slope, offset=offset) - return self._ircounts2radiance(counts=counts, scale=coefs['scale'], - offset=coefs['offset']) + return self._ircounts2radiance(counts=counts, scale=coefs["scale"], + offset=coefs["offset"]) def _calibrate(self, radiance, coefs, channel, calibration): """Convert radiance to reflectance or brightness temperature.""" if is_vis_channel(channel): - if not calibration == 'reflectance': - raise ValueError('Cannot calibrate VIS channel to ' - '{}'.format(calibration)) - return self._calibrate_vis(radiance=radiance, k=coefs['k']) + if not calibration == "reflectance": + raise ValueError("Cannot calibrate VIS channel to " + "{}".format(calibration)) + return self._calibrate_vis(radiance=radiance, k=coefs["k"]) else: - if not calibration == 'brightness_temperature': - raise ValueError('Cannot calibrate IR channel to ' - '{}'.format(calibration)) + if not calibration == "brightness_temperature": + raise ValueError("Cannot calibrate IR channel to " + "{}".format(calibration)) # Since the scanline-detector assignment is unknown, use the average # coefficients for all scanlines. - mean_coefs = {'a': np.array(coefs['a']).mean(), - 'b': np.array(coefs['b']).mean(), - 'n': np.array(coefs['n']).mean(), - 'btmin': coefs['btmin'], - 'btmax': coefs['btmax']} + mean_coefs = {"a": np.array(coefs["a"]).mean(), + "b": np.array(coefs["b"]).mean(), + "n": np.array(coefs["n"]).mean(), + "btmin": coefs["btmin"], + "btmax": coefs["btmax"]} return self._calibrate_ir(radiance=radiance, coefs=mean_coefs) @staticmethod @@ -866,16 +866,16 @@ def _calibrate_ir(radiance, coefs): Returns: Brightness temperature [K] """ - logger.debug('Calibrating to brightness temperature') + logger.debug("Calibrating to brightness temperature") # Compute brightness temperature using inverse Planck formula - n = coefs['n'] + n = coefs["n"] bteff = C2 * n / np.log(1 + C1 * n ** 3 / radiance.where(radiance > 0)) - bt = xr.DataArray(bteff * coefs['b'] + coefs['a']) + bt = xr.DataArray(bteff * coefs["b"] + coefs["a"]) # Apply BT threshold - return bt.where(np.logical_and(bt >= coefs['btmin'], - bt <= coefs['btmax'])) + return bt.where(np.logical_and(bt >= coefs["btmin"], + bt <= coefs["btmax"])) @staticmethod def _viscounts2radiance(counts, slope, offset): @@ -887,6 +887,7 @@ def _viscounts2radiance(counts, slope, offset): counts: Raw detector counts slope: Slope [W m-2 um-1 sr-1] offset: Offset [W m-2 um-1 sr-1] + Returns: Radiance [W m-2 um-1 sr-1] """ @@ -913,10 +914,11 @@ def _calibrate_vis(radiance, k): k: pi / H, where H is the solar spectral irradiance at annual-average sun-earth distance, averaged over the spectral response function of the detector). Units of k: [m2 um sr W-1] + Returns: Reflectance [%] """ - logger.debug('Calibrating to reflectance') + logger.debug("Calibrating to reflectance") refl = 100 * k * radiance return refl.clip(min=0) @@ -928,28 +930,28 @@ def _update_metadata(self, data, ds_info): # If the file_type attribute is a list and the data is xarray # the concat of the dataset will not work. As the file_type is # not needed this will be popped here. - if 'file_type' in data.attrs: - data.attrs.pop('file_type') + if "file_type" in data.attrs: + data.attrs.pop("file_type") # Metadata discovered from the file. 
data.attrs.update( - {'platform_name': self.platform_name, - 'sensor': self.sensor, - 'sector': self.sector, - 'orbital_parameters': {'yaw_flip': self.meta['yaw_flip']}} + {"platform_name": self.platform_name, + "sensor": self.sensor, + "sector": self.sector, + "orbital_parameters": {"yaw_flip": self.meta["yaw_flip"]}} ) - if self.meta['lon0'] is not None: + if self.meta["lon0"] is not None: # Attributes only available for full disc images. YAML reader # doesn't like it if satellite_* is present but None data.attrs.update( - {'nadir_row': self.meta['nadir_row'], - 'nadir_col': self.meta['nadir_col'], - 'area_def_uniform_sampling': self.meta['area_def_uni']} + {"nadir_row": self.meta["nadir_row"], + "nadir_col": self.meta["nadir_col"], + "area_def_uniform_sampling": self.meta["area_def_uni"]} ) - data.attrs['orbital_parameters'].update( - {'projection_longitude': self.meta['lon0'], - 'projection_latitude': self.meta['lat0'], - 'projection_altitude': ALTITUDE} + data.attrs["orbital_parameters"].update( + {"projection_longitude": self.meta["lon0"], + "projection_latitude": self.meta["lat0"], + "projection_altitude": ALTITUDE} ) def __del__(self): @@ -977,10 +979,10 @@ def available_datasets(self, configured_datasets=None): if is_avail is not None: yield is_avail, ds_info - matches = self.file_type_matches(ds_info['file_type']) - if matches and ds_info.get('resolution') != res: + matches = self.file_type_matches(ds_info["file_type"]) + if matches and ds_info.get("resolution") != res: new_info = ds_info.copy() - new_info['resolution'] = res + new_info["resolution"] = res yield True, new_info elif is_avail is None: yield is_avail, ds_info @@ -989,10 +991,10 @@ def available_datasets(self, configured_datasets=None): def is_vis_channel(channel): """Determine whether the given channel is a visible channel.""" if isinstance(channel, str): - return channel == '00_7' + return channel == "00_7" if isinstance(channel, int): return channel == 1 - raise ValueError('Invalid channel') + raise ValueError("Invalid channel") class GOESNCFileHandler(GOESNCBaseFileHandler): @@ -1008,25 +1010,25 @@ def __init__(self, filename, filename_info, filetype_info): def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" - logger.debug('Reading dataset {}'.format(key['name'])) + logger.debug("Reading dataset {}".format(key["name"])) # Read data from file and calibrate if necessary - if 'longitude' in key['name']: - data = self.geo_data['lon'] - elif 'latitude' in key['name']: - data = self.geo_data['lat'] + if "longitude" in key["name"]: + data = self.geo_data["lon"] + elif "latitude" in key["name"]: + data = self.geo_data["lat"] else: tic = datetime.now() - data = self.calibrate(self.nc['data'].isel(time=0), - calibration=key['calibration'], - channel=key['name']) - logger.debug('Calibration time: {}'.format(datetime.now() - tic)) + data = self.calibrate(self.nc["data"].isel(time=0), + calibration=key["calibration"], + channel=key["name"]) + logger.debug("Calibration time: {}".format(datetime.now() - tic)) # Mask space pixels - data = data.where(self.meta['earth_mask']) + data = data.where(self.meta["earth_mask"]) # Set proper dimension names - data = data.rename({'xc': 'x', 'yc': 'y'}) + data = data.rename({"xc": "x", "yc": "y"}) # Update metadata self._update_metadata(data, ds_info=info) @@ -1040,19 +1042,19 @@ def calibrate(self, counts, calibration, channel): counts = counts / 32. 
coefs = CALIB_COEFS[self.platform_name][channel] - if calibration == 'counts': + if calibration == "counts": return counts - if calibration in ['radiance', 'reflectance', - 'brightness_temperature']: + if calibration in ["radiance", "reflectance", + "brightness_temperature"]: radiance = self._counts2radiance(counts=counts, coefs=coefs, channel=channel) - if calibration == 'radiance': + if calibration == "radiance": return radiance return self._calibrate(radiance=radiance, coefs=coefs, channel=channel, calibration=calibration) - raise ValueError('Unsupported calibration for channel {}: {}'.format(channel, calibration)) + raise ValueError("Unsupported calibration for channel {}: {}".format(channel, calibration)) class GOESEUMNCFileHandler(GOESNCBaseFileHandler): @@ -1072,20 +1074,20 @@ def __init__(self, filename, filename_info, filetype_info, geo_data): def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" - logger.debug('Reading dataset {}'.format(key['name'])) + logger.debug("Reading dataset {}".format(key["name"])) tic = datetime.now() - data = self.calibrate(self.nc['data'].isel(time=0), - calibration=key['calibration'], - channel=key['name']) - logger.debug('Calibration time: {}'.format(datetime.now() - tic)) + data = self.calibrate(self.nc["data"].isel(time=0), + calibration=key["calibration"], + channel=key["name"]) + logger.debug("Calibration time: {}".format(datetime.now() - tic)) # Mask space pixels - data = data.where(self.meta['earth_mask']) + data = data.where(self.meta["earth_mask"]) # Set proper dimension names - data = data.rename({'xc': 'x', 'yc': 'y'}) - data = data.drop('time') + data = data.rename({"xc": "x", "yc": "y"}) + data = data.drop("time") # Update metadata self._update_metadata(data, ds_info=info) @@ -1098,15 +1100,15 @@ def calibrate(self, data, calibration, channel): is_vis = is_vis_channel(channel) # IR files provide radiances, VIS file provides reflectances - if is_vis and calibration == 'reflectance': + if is_vis and calibration == "reflectance": return data - if not is_vis and calibration == 'radiance': + if not is_vis and calibration == "radiance": return data - if not is_vis and calibration == 'brightness_temperature': + if not is_vis and calibration == "brightness_temperature": return self._calibrate(radiance=data, calibration=calibration, coefs=coefs, channel=channel) - raise ValueError('Unsupported calibration for channel {}: {}' + raise ValueError("Unsupported calibration for channel {}: {}" .format(channel, calibration)) @@ -1120,13 +1122,13 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, - chunks={'xc': CHUNK_SIZE, 'yc': CHUNK_SIZE}) - self.sensor = 'goes_imager' - self.nlines = self.nc.dims['yc'] - self.ncols = self.nc.dims['xc'] + chunks={"xc": CHUNK_SIZE, "yc": CHUNK_SIZE}) + self.sensor = "goes_imager" + self.nlines = self.nc.dims["yc"] + self.ncols = self.nc.dims["xc"] self.platform_name = GOESNCBaseFileHandler._get_platform_name( - self.nc.attrs['Satellite Sensor']) - self.platform_shortname = self.platform_name.replace('-', '').lower() + self.nc.attrs["Satellite Sensor"]) + self.platform_shortname = self.platform_name.replace("-", "").lower() self._meta = None def __getitem__(self, item): @@ -1135,18 +1137,18 @@ def __getitem__(self, item): def get_dataset(self, key, info): """Load dataset designated by the given key from file.""" - logger.debug('Reading dataset {}'.format(key['name'])) + logger.debug("Reading 
dataset {}".format(key["name"])) # Read data from file and calibrate if necessary - if 'longitude' in key['name']: - data = self.nc['lon'] - elif 'latitude' in key['name']: - data = self.nc['lat'] + if "longitude" in key["name"]: + data = self.nc["lon"] + elif "latitude" in key["name"]: + data = self.nc["lat"] else: - raise KeyError("Unknown dataset: {}".format(key['name'])) + raise KeyError("Unknown dataset: {}".format(key["name"])) # Set proper dimension names - data = data.rename({'xc': 'x', 'yc': 'y'}) + data = data.rename({"xc": "x", "yc": "y"}) # Update metadata data.attrs.update(info) @@ -1165,36 +1167,36 @@ class GOESCoefficientReader(object): """Read GOES Imager calibration coefficients from NOAA reference HTMLs.""" gvar_channels = { - 'GOES-8': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, - 'GOES-9': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, - 'GOES-10': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, - 'GOES-11': {'00_7': 1, '03_9': 2, '06_8': 3, '10_7': 4, '12_0': 5}, - 'GOES-12': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, - 'GOES-13': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, - 'GOES-14': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, - 'GOES-15': {'00_7': 1, '03_9': 2, '06_5': 3, '10_7': 4, '13_3': 6}, + "GOES-8": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, + "GOES-9": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, + "GOES-10": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, + "GOES-11": {"00_7": 1, "03_9": 2, "06_8": 3, "10_7": 4, "12_0": 5}, + "GOES-12": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, + "GOES-13": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, + "GOES-14": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, + "GOES-15": {"00_7": 1, "03_9": 2, "06_5": 3, "10_7": 4, "13_3": 6}, } ir_tables = { - 'GOES-8': '2-1', - 'GOES-9': '2-2', - 'GOES-10': '2-3', - 'GOES-11': '2-4', - 'GOES-12': '2-5a', - 'GOES-13': '2-6', - 'GOES-14': '2-7c', - 'GOES-15': '2-8b' + "GOES-8": "2-1", + "GOES-9": "2-2", + "GOES-10": "2-3", + "GOES-11": "2-4", + "GOES-12": "2-5a", + "GOES-13": "2-6", + "GOES-14": "2-7c", + "GOES-15": "2-8b" } vis_tables = { - 'GOES-8': 'Table 1.', - 'GOES-9': 'Table 1.', - 'GOES-10': 'Table 2.', - 'GOES-11': 'Table 3.', - 'GOES-12': 'Table 4.', - 'GOES-13': 'Table 5.', - 'GOES-14': 'Table 6.', - 'GOES-15': 'Table 7.' + "GOES-8": "Table 1.", + "GOES-9": "Table 1.", + "GOES-10": "Table 2.", + "GOES-11": "Table 3.", + "GOES-12": "Table 4.", + "GOES-13": "Table 5.", + "GOES-14": "Table 6.", + "GOES-15": "Table 7." } def __init__(self, ir_url, vis_url): @@ -1217,13 +1219,13 @@ def _load_url_or_file(self, url): except (MissingSchema, requests.HTTPError): # Not a valid URL, is it a file? 
try: - return open(url, mode='r') + return open(url, mode="r") except IOError: - raise ValueError('Invalid URL or file: {}'.format(url)) + raise ValueError("Invalid URL or file: {}".format(url)) def get_coefs(self, platform, channel): """Get the coefs.""" - if channel == '00_7': + if channel == "00_7": return self._get_vis_coefs(platform=platform) return self._get_ir_coefs(platform=platform, channel=channel) @@ -1236,27 +1238,27 @@ def _get_ir_coefs(self, platform, channel): # Extract scale and offset for conversion counts->radiance from # Table 1-1 (same for all platforms, only depends on the channel) gvar_channel = self.gvar_channels[platform][channel] - table11 = self._get_table(root=self.ir_html, heading='Table 1-1', - heading_type='h3') + table11 = self._get_table(root=self.ir_html, heading="Table 1-1", + heading_type="h3") for row in table11: if int(row[0]) == gvar_channel: - coefs['scale'] = self._float(row[1]) - coefs['offset'] = self._float(row[2]) + coefs["scale"] = self._float(row[1]) + coefs["offset"] = self._float(row[2]) # Extract n,a,b (radiance -> BT) from the coefficient table for the # given platform table = self._get_table(root=self.ir_html, heading=self.ir_tables[platform], - heading_type='h3') - channel_regex = re.compile('^{}(?:/[a,b])?$'.format(gvar_channel)) + heading_type="h3") + channel_regex = re.compile("^{}(?:/[a,b])?$".format(gvar_channel)) for row in table: if channel_regex.match(row[0]): # Extract coefficients. Detector (a) always comes before (b) # in the table so that simply appending preserves the order. - coefs['n'].append(self._float(row[1])) - coefs['a'].append(self._float(row[2])) - coefs['b'].append(self._float(row[3])) + coefs["n"].append(self._float(row[1])) + coefs["a"].append(self._float(row[2])) + coefs["b"].append(self._float(row[3])) return coefs @@ -1266,28 +1268,28 @@ def _get_vis_coefs(self, platform): # Find calibration table table = self._get_table(root=self.vis_html, heading=self.vis_tables[platform], - heading_type='p') + heading_type="p") # Extract values coefs = defaultdict(list) - if platform in ('GOES-8', 'GOES-9'): + if platform in ("GOES-8", "GOES-9"): # GOES 8&9 coefficients are in the same table - col = 1 if platform == 'GOES-8' else 2 - coefs['slope'].append(self._float(table[1][col])) - coefs['x0'] = self._float(table[2][col]) - coefs['offset'].append(self._float(table[3][col])) - coefs['k'] = self._float(table[4][col]) + col = 1 if platform == "GOES-8" else 2 + coefs["slope"].append(self._float(table[1][col])) + coefs["x0"] = self._float(table[2][col]) + coefs["offset"].append(self._float(table[3][col])) + coefs["k"] = self._float(table[4][col]) else: # k and x0 appear in the first row only - coefs['slope'].append(self._float(table[0][1])) - coefs['x0'] = self._float(table[0][2]) - coefs['k'] = self._float(table[0][4]) - coefs['offset'].append(self._float(table[0][3])) + coefs["slope"].append(self._float(table[0][1])) + coefs["x0"] = self._float(table[0][2]) + coefs["k"] = self._float(table[0][4]) + coefs["offset"].append(self._float(table[0][3])) # Remaining rows for row in table[1:]: - coefs['slope'].append(self._float(row[1])) - coefs['offset'].append(self._float(row[2])) + coefs["slope"].append(self._float(row[1])) + coefs["offset"].append(self._float(row[2])) return coefs @@ -1296,7 +1298,7 @@ def _get_table(self, root, heading, heading_type, ): headings = [h for h in root.find_all(heading_type) if heading in h.text] if not headings: - raise ValueError('Cannot find a coefficient table matching text ' + raise 
ValueError("Cannot find a coefficient table matching text " '"{}"'.format(heading)) if len(headings) > 1: raise ValueError('Found multiple headings matching text "{}"' @@ -1305,14 +1307,14 @@ def _get_table(self, root, heading, heading_type, ): # Copy items to a list of lists tab = list() - for row in table.find_all('tr'): - cols = row.find_all('td') + for row in table.find_all("tr"): + cols = row.find_all("td") if cols: tab.append([c.text for c in cols]) return tab def _denoise(self, string): - return string.replace('\n', '').replace(' ', '') + return string.replace("\n", "").replace(" ", "") def _float(self, string): """Convert string to float. @@ -1320,11 +1322,11 @@ def _float(self, string): Take care of numbers in exponential format """ string = self._denoise(string) - exp_match = re.match(r'^[-.\d]+x10-(\d)$', string) + exp_match = re.match(r"^[-.\d]+x10-(\d)$", string) if exp_match: exp = int(exp_match.groups()[0]) fac = 10 ** -exp - string = string.replace('x10-{}'.format(exp), '') + string = string.replace("x10-{}".format(exp), "") else: fac = 1 @@ -1355,10 +1357,10 @@ def test_coefs(ir_url, vis_url): for cname in coefs_expected.keys(): if not np.allclose(coefs[cname], coefs_expected[cname]): raise ValueError( - 'Coefficient {} for {} channel {} does not match the ' - 'reference'.format(cname, platform, channel)) + "Coefficient {} for {} channel {} does not match the " + "reference".format(cname, platform, channel)) - logger.info('Coefficients OK') + logger.info("Coefficients OK") return True @@ -1384,12 +1386,12 @@ def get_area_def_with_uniform_sampling(self, projection_longitude): def _get_projection(self, projection_longitude): return { - 'a': EQUATOR_RADIUS, - 'b': POLE_RADIUS, - 'lon_0': projection_longitude, - 'h': ALTITUDE, - 'proj': 'geos', - 'units': 'm' + "a": EQUATOR_RADIUS, + "b": POLE_RADIUS, + "lon_0": projection_longitude, + "h": ALTITUDE, + "proj": "geos", + "units": "m" } def _get_area_extent_at_max_scan_angle(self, proj_dict): @@ -1398,9 +1400,9 @@ def _get_area_extent_at_max_scan_angle(self, proj_dict): def _get_max_scan_angle(self, proj_dict): dummy_area = pyresample.geometry.AreaDefinition( - area_id='dummy', - proj_id='dummy', - description='dummy', + area_id="dummy", + proj_id="dummy", + description="dummy", projection=proj_dict, width=2, height=2, @@ -1427,8 +1429,8 @@ def _get_uniform_pixel_size(self): def _create_area_def(self, projection, area_extent, shape): width, height = shape return pyresample.geometry.AreaDefinition( - area_id='goes_geos_uniform', - proj_id='goes_geos_uniform', + area_id="goes_geos_uniform", + proj_id="goes_geos_uniform", description=self._get_area_description(), projection=projection, width=width, @@ -1437,6 +1439,6 @@ def _create_area_def(self, projection, area_extent, shape): ) def _get_area_description(self): - return '{} geostationary projection (uniform sampling)'.format( + return "{} geostationary projection (uniform sampling)".format( self.platform_name ) diff --git a/satpy/readers/gpm_imerg.py b/satpy/readers/gpm_imerg.py index 3a68f8a9bb..7bc65ac4c6 100644 --- a/satpy/readers/gpm_imerg.py +++ b/satpy/readers/gpm_imerg.py @@ -49,34 +49,34 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Find the start time from filename info.""" - return datetime(self.finfo['date'].year, - self.finfo['date'].month, - self.finfo['date'].day, - self.finfo['start_time'].hour, - self.finfo['start_time'].minute, - self.finfo['start_time'].second) + return datetime(self.finfo["date"].year, + 
self.finfo["date"].month, + self.finfo["date"].day, + self.finfo["start_time"].hour, + self.finfo["start_time"].minute, + self.finfo["start_time"].second) @property def end_time(self): """Find the end time from filename info.""" - return datetime(self.finfo['date'].year, - self.finfo['date'].month, - self.finfo['date'].day, - self.finfo['end_time'].hour, - self.finfo['end_time'].minute, - self.finfo['end_time'].second) + return datetime(self.finfo["date"].year, + self.finfo["date"].month, + self.finfo["date"].day, + self.finfo["end_time"].hour, + self.finfo["end_time"].minute, + self.finfo["end_time"].second) def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" - file_key = ds_info.get('file_key', dataset_id['name']) - dsname = 'Grid/' + file_key + file_key = ds_info.get("file_key", dataset_id["name"]) + dsname = "Grid/" + file_key data = self.get(dsname) data = data.squeeze().transpose() if data.ndim >= 2: - data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) + data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) data.data = da.flip(data.data, axis=0) - fill = data.attrs['_FillValue'] + fill = data.attrs["_FillValue"] data = data.where(data != fill) for key in list(data.attrs.keys()): @@ -89,8 +89,8 @@ def get_dataset(self, dataset_id, ds_info): def get_area_def(self, dsid): """Create area definition from the gridded lat/lon values.""" - lats = self.__getitem__('Grid/lat').values - lons = self.__getitem__('Grid/lon').values + lats = self.__getitem__("Grid/lat").values + lons = self.__getitem__("Grid/lon").values width = lons.shape[0] height = lats.shape[0] @@ -103,8 +103,8 @@ def get_area_def(self, dsid): area_extent = (lower_left_x, lower_left_y, upper_right_x, upper_right_y) description = "IMERG GPM Equirectangular Projection" - area_id = 'imerg' - proj_id = 'equirectangular' - proj_dict = {'proj': 'longlat', 'datum': 'WGS84', 'ellps': 'WGS84', } + area_id = "imerg" + proj_id = "equirectangular" + proj_dict = {"proj": "longlat", "datum": "WGS84", "ellps": "WGS84", } area_def = AreaDefinition(area_id, description, proj_id, proj_dict, width, height, area_extent, ) return area_def diff --git a/satpy/readers/grib.py b/satpy/readers/grib.py index 2d78792f2b..dadccce77a 100644 --- a/satpy/readers/grib.py +++ b/satpy/readers/grib.py @@ -41,7 +41,7 @@ CHUNK_SIZE = get_legacy_chunk_size() CF_UNITS = { - 'none': '1', + "none": "1", } @@ -60,46 +60,46 @@ def __init__(self, filename, filename_info, filetype_info): first_msg = grib_file.message(1) last_msg = grib_file.message(grib_file.messages) start_time = self._convert_datetime( - first_msg, 'validityDate', 'validityTime') + first_msg, "validityDate", "validityTime") end_time = self._convert_datetime( - last_msg, 'validityDate', 'validityTime') + last_msg, "validityDate", "validityTime") self._start_time = start_time self._end_time = end_time - if 'keys' not in filetype_info: + if "keys" not in filetype_info: self._analyze_messages(grib_file) self._idx = None else: - self._create_dataset_ids(filetype_info['keys']) + self._create_dataset_ids(filetype_info["keys"]) self._idx = pygrib.index(self.filename, - *filetype_info['keys'].keys()) + *filetype_info["keys"].keys()) except (RuntimeError, KeyError): raise IOError("Unknown GRIB file format: {}".format(self.filename)) def _analyze_messages(self, grib_file): grib_file.seek(0) for idx, msg in enumerate(grib_file): - msg_id = DataQuery(name=msg['shortName'], - level=msg['level'], + msg_id = DataQuery(name=msg["shortName"], + level=msg["level"], modifiers=tuple()) ds_info 
= { - 'message': idx + 1, - 'name': msg['shortName'], - 'level': msg['level'], - 'file_type': self.filetype_info['file_type'], + "message": idx + 1, + "name": msg["shortName"], + "level": msg["level"], + "file_type": self.filetype_info["file_type"], } self._msg_datasets[msg_id] = ds_info def _create_dataset_ids(self, keys): from itertools import product - ordered_keys = [k for k in keys.keys() if 'id_key' in keys[k]] - for id_vals in product(*[keys[k]['values'] for k in ordered_keys]): - id_keys = [keys[k]['id_key'] for k in ordered_keys] + ordered_keys = [k for k in keys.keys() if "id_key" in keys[k]] + for id_vals in product(*[keys[k]["values"] for k in ordered_keys]): + id_keys = [keys[k]["id_key"] for k in ordered_keys] msg_info = dict(zip(ordered_keys, id_vals)) ds_info = dict(zip(id_keys, id_vals)) msg_id = DataQuery(**ds_info) ds_info = msg_id.to_dict() ds_info.update(msg_info) - ds_info['file_type'] = self.filetype_info['file_type'] + ds_info["file_type"] = self.filetype_info["file_type"] self._msg_datasets[msg_id] = ds_info @staticmethod @@ -137,11 +137,11 @@ def available_datasets(self, configured_datasets=None): def _get_message(self, ds_info): with pygrib.open(self.filename) as grib_file: - if 'message' in ds_info: - msg_num = ds_info['message'] + if "message" in ds_info: + msg_num = ds_info["message"] msg = grib_file.message(msg_num) else: - msg_keys = self.filetype_info['keys'].keys() + msg_keys = self.filetype_info["keys"].keys() msg = self._idx(**{k: ds_info[k] for k in msg_keys})[0] return msg @@ -154,7 +154,7 @@ def _correct_cyl_minmax_xy(proj_params, min_lon, min_lat, max_lon, max_lat): # wrap around # make 180 longitude the prime meridian # assuming we are going from 0 to 360 longitude - proj_params['pm'] = 180 + proj_params["pm"] = 180 proj = Proj(**proj_params) # recompute x/y extents with this new projection min_x, min_y = proj(min_lon, min_lat) @@ -173,9 +173,9 @@ def _get_cyl_minmax_lonlat(lons, lats): return min_lon, min_lat, max_lon, max_lat def _get_cyl_area_info(self, msg, proj_params): - proj_params['proj'] = 'eqc' - lons = msg['distinctLongitudes'] - lats = msg['distinctLatitudes'] + proj_params["proj"] = "eqc" + lons = msg["distinctLongitudes"] + lats = msg["distinctLatitudes"] shape = (lats.shape[0], lons.shape[0]) minmax_lonlat = self._get_cyl_minmax_lonlat(lons, lats) proj_params, minmax_xy = self._correct_cyl_minmax_xy(proj_params, *minmax_lonlat) @@ -208,14 +208,14 @@ def _get_corner_lonlat(proj_params, lons, lats): # if we have longitudes over 180, assume 0-360 if (lons > 180).any(): # make 180 longitude the prime meridian - proj_params['pm'] = 180 + proj_params["pm"] = 180 return proj_params, lons, lats def _get_area_info(self, msg, proj_params): lats, lons = msg.latlons() shape = lats.shape - scans_positively = (msg.valid_key('jScansPositively') and - msg['jScansPositively'] == 1) + scans_positively = (msg.valid_key("jScansPositively") and + msg["jScansPositively"] == 1) proj_params, lons, lats = self._get_corner_lonlat( proj_params, lons, lats) minmax_xy = self._get_corner_xy(proj_params, lons, lats, scans_positively) @@ -225,7 +225,7 @@ def _get_area_info(self, msg, proj_params): @staticmethod def _correct_proj_params_over_prime_meridian(proj_params): # correct for longitudes over 180 - for lon_param in ['lon_0', 'lon_1', 'lon_2']: + for lon_param in ["lon_0", "lon_1", "lon_2"]: if proj_params.get(lon_param, 0) > 180: proj_params[lon_param] -= 360 return proj_params @@ -234,16 +234,16 @@ def _area_def_from_msg(self, msg): proj_params = 
msg.projparams.copy() proj_params = self._correct_proj_params_over_prime_meridian(proj_params) - if proj_params['proj'] in ('cyl', 'eqc'): + if proj_params["proj"] in ("cyl", "eqc"): # eqc projection that goes from 0 to 360 proj_params, shape, extents = self._get_cyl_area_info(msg, proj_params) else: proj_params, shape, extents = self._get_area_info(msg, proj_params) return geometry.AreaDefinition( - 'on-the-fly grib area', - 'on-the-fly grib area', - 'on-the-fly grib area', + "on-the-fly grib area", + "on-the-fly grib area", + "on-the-fly grib area", proj_params, shape[1], shape[0], @@ -264,41 +264,41 @@ def get_area_def(self, dsid): def get_metadata(self, msg, ds_info): """Get metadata.""" - model_time = self._convert_datetime(msg, 'dataDate', - 'dataTime') - start_time = self._convert_datetime(msg, 'validityDate', - 'validityTime') + model_time = self._convert_datetime(msg, "dataDate", + "dataTime") + start_time = self._convert_datetime(msg, "validityDate", + "validityTime") end_time = start_time try: - center_description = msg['centreDescription'] + center_description = msg["centreDescription"] except (RuntimeError, KeyError): center_description = None key_dicts = { - 'shortName': 'shortName', - 'long_name': 'name', - 'pressureUnits': 'pressureUnits', - 'typeOfLevel': 'typeOfLevel', - 'standard_name': 'cfName', - 'units': 'units', - 'modelName': 'modelName', - 'valid_min': 'minimum', - 'valid_max': 'maximum', - 'sensor': 'modelName'} + "shortName": "shortName", + "long_name": "name", + "pressureUnits": "pressureUnits", + "typeOfLevel": "typeOfLevel", + "standard_name": "cfName", + "units": "units", + "modelName": "modelName", + "valid_min": "minimum", + "valid_max": "maximum", + "sensor": "modelName"} ds_info.update({ - 'filename': self.filename, - 'model_time': model_time, - 'centreDescription': center_description, - 'start_time': start_time, - 'end_time': end_time, - 'platform_name': 'unknown'}) + "filename": self.filename, + "model_time": model_time, + "centreDescription": center_description, + "start_time": start_time, + "end_time": end_time, + "platform_name": "unknown"}) for key in key_dicts: if key_dicts[key] in msg.keys(): ds_info[key] = msg[key_dicts[key]] else: - ds_info[key] = 'unknown' + ds_info[key] = "unknown" return ds_info @@ -306,9 +306,9 @@ def get_dataset(self, dataset_id, ds_info): """Read a GRIB message into an xarray DataArray.""" msg = self._get_message(ds_info) ds_info = self.get_metadata(msg, ds_info) - fill = msg['missingValue'] + fill = msg["missingValue"] data = msg.values.astype(np.float32) - if msg.valid_key('jScansPositively') and msg['jScansPositively'] == 1: + if msg.valid_key("jScansPositively") and msg["jScansPositively"] == 1: data = data[::-1] if isinstance(data, np.ma.MaskedArray): @@ -318,4 +318,4 @@ def get_dataset(self, dataset_id, ds_info): data[data == fill] = np.nan data = da.from_array(data, chunks=CHUNK_SIZE) - return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) + return xr.DataArray(data, attrs=ds_info, dims=("y", "x")) diff --git a/satpy/readers/hdf4_utils.py b/satpy/readers/hdf4_utils.py index fb20c0ce11..d6258d9d62 100644 --- a/satpy/readers/hdf4_utils.py +++ b/satpy/readers/hdf4_utils.py @@ -47,9 +47,9 @@ def from_sds(var, *args, **kwargs): """Create a dask array from a SD dataset.""" - var.__dict__['dtype'] = np.dtype(HTYPE_TO_DTYPE[var.info()[3]]) + var.__dict__["dtype"] = np.dtype(HTYPE_TO_DTYPE[var.info()[3]]) shape = var.info()[2] - var.__dict__['shape'] = shape if isinstance(shape, (tuple, list)) else tuple(shape) + 
var.__dict__["shape"] = shape if isinstance(shape, (tuple, list)) else tuple(shape) return da.from_array(var, *args, **kwargs) @@ -61,7 +61,7 @@ def __init__(self, filename, filename_info, filetype_info): super(HDF4FileHandler, self).__init__(filename, filename_info, filetype_info) self.file_content = {} file_handle = SD(self.filename, SDC.READ) - self._collect_attrs('', file_handle.attributes()) + self._collect_attrs("", file_handle.attributes()) for k in file_handle.datasets().keys(): self.collect_metadata(k, file_handle.select(k)) del file_handle @@ -94,7 +94,7 @@ def _open_xarray_dataset(self, val, chunks=CHUNK_SIZE): """Read the band in blocks.""" dask_arr = from_sds(val, chunks=chunks) attrs = val.attributes() - return xr.DataArray(dask_arr, dims=('y', 'x'), + return xr.DataArray(dask_arr, dims=("y", "x"), attrs=attrs) def __getitem__(self, key): diff --git a/satpy/readers/hdf5_utils.py b/satpy/readers/hdf5_utils.py index 2a1c8c23bb..428d64e2f1 100644 --- a/satpy/readers/hdf5_utils.py +++ b/satpy/readers/hdf5_utils.py @@ -43,14 +43,14 @@ def __init__(self, filename, filename_info, filetype_info): self._attrs_cache = {} try: - file_handle = h5py.File(self.filename, 'r') + file_handle = h5py.File(self.filename, "r") except IOError: LOG.exception( - 'Failed reading file %s. Possibly corrupted file', self.filename) + "Failed reading file %s. Possibly corrupted file", self.filename) raise file_handle.visititems(self.collect_metadata) - self._collect_attrs('', file_handle.attrs) + self._collect_attrs("", file_handle.attrs) file_handle.close() def _collect_attrs(self, name, attrs): @@ -73,7 +73,7 @@ def _collect_attrs(self, name, attrs): def get_reference(self, name, key): """Get reference.""" - with h5py.File(self.filename, 'r') as hf: + with h5py.File(self.filename, "r") as hf: return self._get_reference(hf, hf[name].attrs[key]) def _get_reference(self, hf, ref): @@ -97,11 +97,11 @@ def __getitem__(self, key): val = self.file_content[key] if isinstance(val, h5py.Dataset): # these datasets are closed and inaccessible when the file is closed, need to reopen - dset = h5py.File(self.filename, 'r')[key] + dset = h5py.File(self.filename, "r")[key] dset_data = da.from_array(dset, chunks=CHUNK_SIZE) attrs = self._attrs_cache.get(key, dset.attrs) if dset.ndim == 2: - return xr.DataArray(dset_data, dims=['y', 'x'], attrs=attrs) + return xr.DataArray(dset_data, dims=["y", "x"], attrs=attrs) return xr.DataArray(dset_data, attrs=attrs) return val diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 56b15b626d..f60040a46f 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -118,7 +118,7 @@ def _load_all_metadata_attributes(self): @classmethod def read_mda(cls, attribute): """Read the EOS metadata.""" - line_iterator = iter(attribute.split('\n')) + line_iterator = iter(attribute.split("\n")) return cls._read_mda(line_iterator) @classmethod @@ -128,18 +128,18 @@ def _read_mda(cls, lines, element=None): for line in lines: if not line: continue - if line == 'END': + if line == "END": return current_dict key, val = cls._split_line(line, lines) - if key in ['GROUP', 'OBJECT']: + if key in ["GROUP", "OBJECT"]: current_dict[val] = cls._read_mda(lines, val) - elif key in ['END_GROUP', 'END_OBJECT']: + elif key in ["END_GROUP", "END_OBJECT"]: if val != element: raise SyntaxError("Non-matching end-tag") return current_dict - elif key in ['CLASS', 'NUM_VAL']: + elif key in ["CLASS", "NUM_VAL"]: pass else: current_dict[key] = val @@ -148,7 +148,7 @@ def 
_read_mda(cls, lines, element=None): @classmethod def _split_line(cls, line, lines): - key, val = line.split('=') + key, val = line.split("=") key = key.strip() val = val.strip() try: @@ -163,8 +163,8 @@ def metadata_platform_name(self): """Platform name from the internal file metadata.""" try: # Example: 'Terra' or 'Aqua' - return self.metadata['INVENTORYMETADATA']['ASSOCIATEDPLATFORMINSTRUMENTSENSOR'][ - 'ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER']['ASSOCIATEDPLATFORMSHORTNAME']['VALUE'] + return self.metadata["INVENTORYMETADATA"]["ASSOCIATEDPLATFORMINSTRUMENTSENSOR"][ + "ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER"]["ASSOCIATEDPLATFORMSHORTNAME"]["VALUE"] except KeyError: return self._platform_name_from_filename() @@ -180,9 +180,9 @@ def _platform_name_from_filename(self): def start_time(self): """Get the start time of the dataset.""" try: - date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGDATE']['VALUE'] + ' ' + - self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGTIME']['VALUE']) - return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f') + date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGDATE"]["VALUE"] + " " + + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEBEGINNINGTIME"]["VALUE"]) + return datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") except KeyError: return self._start_time_from_filename() @@ -193,9 +193,9 @@ def _start_time_from_filename(self): def end_time(self): """Get the end time of the dataset.""" try: - date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGDATE']['VALUE'] + ' ' + - self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGTIME']['VALUE']) - return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f') + date = (self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGDATE"]["VALUE"] + " " + + self.metadata["INVENTORYMETADATA"]["RANGEDATETIME"]["RANGEENDINGTIME"]["VALUE"]) + return datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f") except KeyError: return self.start_time @@ -216,7 +216,7 @@ def load_dataset(self, dataset_name, is_category=False): dataset = self._read_dataset_in_file(dataset_name) chunks = self._chunks_for_variable(dataset) dask_arr = from_sds(dataset, chunks=chunks) - dims = ('y', 'x') if dask_arr.ndim == 2 else None + dims = ("y", "x") if dask_arr.ndim == 2 else None data = xr.DataArray(dask_arr, dims=dims, attrs=dataset.attributes()) data = self._scale_and_mask_data_array(data, is_category=is_category) @@ -262,8 +262,8 @@ def _scale_and_mask_data_array(self, data, is_category=False): """ good_mask, new_fill = self._get_good_data_mask(data, is_category=is_category) - scale_factor = data.attrs.pop('scale_factor', None) - add_offset = data.attrs.pop('add_offset', None) + scale_factor = data.attrs.pop("scale_factor", None) + add_offset = data.attrs.pop("add_offset", None) # don't scale category products, even though scale_factor may equal 1 # we still need to convert integers to floats if scale_factor is not None and not is_category: @@ -286,15 +286,15 @@ def _get_good_data_mask(self, data_arr, is_category=False): # no need to mask, the fill value is already what it needs to be return None, None new_fill = np.nan - data_arr.attrs.pop('_FillValue', None) + data_arr.attrs.pop("_FillValue", None) good_mask = data_arr != fill_value return good_mask, new_fill def _add_satpy_metadata(self, data_id: DataID, data_arr: xr.DataArray): """Add metadata that is specific to Satpy.""" new_attrs = { - 'platform_name': 'EOS-' + self.metadata_platform_name, - 
'sensor': 'modis', + "platform_name": "EOS-" + self.metadata_platform_name, + "sensor": "modis", } res = data_id["resolution"] @@ -319,12 +319,12 @@ class HDFEOSGeoReader(HDFEOSBaseFileReader): # list of geographical datasets handled by the georeader # mapping to the default variable name if not specified in YAML DATASET_NAMES = { - 'longitude': 'Longitude', - 'latitude': 'Latitude', - 'satellite_azimuth_angle': ('SensorAzimuth', 'Sensor_Azimuth'), - 'satellite_zenith_angle': ('SensorZenith', 'Sensor_Zenith'), - 'solar_azimuth_angle': ('SolarAzimuth', 'SolarAzimuth'), - 'solar_zenith_angle': ('SolarZenith', 'Solar_Zenith'), + "longitude": "Longitude", + "latitude": "Latitude", + "satellite_azimuth_angle": ("SensorAzimuth", "Sensor_Azimuth"), + "satellite_zenith_angle": ("SensorZenith", "Sensor_Zenith"), + "solar_azimuth_angle": ("SolarAzimuth", "SolarAzimuth"), + "solar_zenith_angle": ("SolarZenith", "Solar_Zenith"), } def __init__(self, filename, filename_info, filetype_info, **kwargs): @@ -351,8 +351,8 @@ def read_geo_resolution(metadata): @staticmethod def _geo_resolution_for_l1b(metadata): - ds = metadata['INVENTORYMETADATA']['COLLECTIONDESCRIPTIONCLASS']['SHORTNAME']['VALUE'] - if ds.endswith('D03') or ds.endswith('HKM') or ds.endswith('QKM'): + ds = metadata["INVENTORYMETADATA"]["COLLECTIONDESCRIPTIONCLASS"]["SHORTNAME"]["VALUE"] + if ds.endswith("D03") or ds.endswith("HKM") or ds.endswith("QKM"): return 1000 # 1km files have 5km geolocation usually return 5000 @@ -362,10 +362,10 @@ def _geo_resolution_for_l2_l1b(metadata): # data files probably have this level 2 files # this does not work for L1B 1KM data files because they are listed # as 1KM data but the geo data inside is at 5km - latitude_dim = metadata['SwathStructure']['SWATH_1']['DimensionMap']['DimensionMap_2']['GeoDimension'] - resolution_regex = re.compile(r'(?P\d+)(km|KM)') + latitude_dim = metadata["SwathStructure"]["SWATH_1"]["DimensionMap"]["DimensionMap_2"]["GeoDimension"] + resolution_regex = re.compile(r"(?P\d+)(km|KM)") resolution_match = resolution_regex.search(latitude_dim) - return int(resolution_match.group('resolution')) * 1000 + return int(resolution_match.group("resolution")) * 1000 @property def geo_resolution(self): @@ -391,7 +391,7 @@ def get_interpolated_dataset(self, name1, name2, resolution, offset=0): result1 = self._load_ds_by_name(name1) result2 = self._load_ds_by_name(name2) - offset try: - sensor_zenith = self._load_ds_by_name('satellite_zenith_angle') + sensor_zenith = self._load_ds_by_name("satellite_zenith_angle") except KeyError: # no sensor zenith angle, do "simple" interpolation sensor_zenith = None @@ -406,11 +406,11 @@ def get_interpolated_dataset(self, name1, name2, resolution, offset=0): def get_dataset(self, dataset_id: DataID, dataset_info: dict) -> xr.DataArray: """Get the geolocation dataset.""" # Name of the dataset as it appears in the HDF EOS file - in_file_dataset_name = dataset_info.get('file_key') + in_file_dataset_name = dataset_info.get("file_key") # Name of the dataset in the YAML file - dataset_name = dataset_id['name'] + dataset_name = dataset_id["name"] # Resolution asked - resolution = dataset_id['resolution'] + resolution = dataset_id["resolution"] if in_file_dataset_name is not None: # if the YAML was configured with a specific name use that data = self.load_dataset(in_file_dataset_name) @@ -427,21 +427,21 @@ def get_dataset(self, dataset_id: DataID, dataset_info: dict) -> xr.DataArray: # The data must be interpolated logger.debug("Loading %s", dataset_name) - if 
dataset_name in ['longitude', 'latitude']: - self.get_interpolated_dataset('longitude', 'latitude', + if dataset_name in ["longitude", "latitude"]: + self.get_interpolated_dataset("longitude", "latitude", resolution) - elif dataset_name in ['satellite_azimuth_angle', 'satellite_zenith_angle']: + elif dataset_name in ["satellite_azimuth_angle", "satellite_zenith_angle"]: # Sensor dataset names differs between L1b and L2 products - self.get_interpolated_dataset('satellite_azimuth_angle', 'satellite_zenith_angle', + self.get_interpolated_dataset("satellite_azimuth_angle", "satellite_zenith_angle", resolution, offset=90) - elif dataset_name in ['solar_azimuth_angle', 'solar_zenith_angle']: + elif dataset_name in ["solar_azimuth_angle", "solar_zenith_angle"]: # Sensor dataset names differs between L1b and L2 products - self.get_interpolated_dataset('solar_azimuth_angle', 'solar_zenith_angle', + self.get_interpolated_dataset("solar_azimuth_angle", "solar_zenith_angle", resolution, offset=90) data = self.cache[dataset_name, resolution] - for key in ('standard_name', 'units'): + for key in ("standard_name", "units"): if key in dataset_info: data.attrs[key] = dataset_info[key] self._add_satpy_metadata(dataset_id, data) diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index c8b2287653..bf53d84a65 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -48,41 +48,41 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import dec10216 -logger = logging.getLogger('hrit_base') +logger = logging.getLogger("hrit_base") -common_hdr = np.dtype([('hdr_id', 'u1'), - ('record_length', '>u2')]) +common_hdr = np.dtype([("hdr_id", "u1"), + ("record_length", ">u2")]) -primary_header = np.dtype([('file_type', 'u1'), - ('total_header_length', '>u4'), - ('data_field_length', '>u8')]) +primary_header = np.dtype([("file_type", "u1"), + ("total_header_length", ">u4"), + ("data_field_length", ">u8")]) -image_structure = np.dtype([('number_of_bits_per_pixel', 'u1'), - ('number_of_columns', '>u2'), - ('number_of_lines', '>u2'), - ('compression_flag_for_data', 'u1')]) +image_structure = np.dtype([("number_of_bits_per_pixel", "u1"), + ("number_of_columns", ">u2"), + ("number_of_lines", ">u2"), + ("compression_flag_for_data", "u1")]) -image_navigation = np.dtype([('projection_name', 'S32'), - ('cfac', '>i4'), - ('lfac', '>i4'), - ('coff', '>i4'), - ('loff', '>i4')]) +image_navigation = np.dtype([("projection_name", "S32"), + ("cfac", ">i4"), + ("lfac", ">i4"), + ("coff", ">i4"), + ("loff", ">i4")]) -image_data_function = np.dtype([('function', '|S1')]) +image_data_function = np.dtype([("function", "|S1")]) -annotation_header = np.dtype([('annotation', '|S1')]) +annotation_header = np.dtype([("annotation", "|S1")]) -timestamp_record = np.dtype([('cds_p_field', 'u1'), - ('timestamp', time_cds_short)]) +timestamp_record = np.dtype([("cds_p_field", "u1"), + ("timestamp", time_cds_short)]) -ancillary_text = np.dtype([('ancillary', '|S1')]) +ancillary_text = np.dtype([("ancillary", "|S1")]) -key_header = np.dtype([('key', '|S1')]) +key_header = np.dtype([("key", "|S1")]) -base_text_headers = {image_data_function: 'image_data_function', - annotation_header: 'annotation_header', - ancillary_text: 'ancillary_text', - key_header: 'key_header'} +base_text_headers = {image_data_function: "image_data_function", + annotation_header: "annotation_header", + ancillary_text: "ancillary_text", + key_header: "key_header"} base_hdr_map = {0: primary_header, 1: 
image_structure, @@ -97,7 +97,7 @@ def get_xritdecompress_cmd(): """Find a valid binary for the xRITDecompress command.""" - cmd = os.environ.get('XRIT_DECOMPRESS_PATH', None) + cmd = os.environ.get("XRIT_DECOMPRESS_PATH", None) if not cmd: raise IOError("XRIT_DECOMPRESS_PATH is not defined (complete path to xRITDecompress)") @@ -112,20 +112,20 @@ def get_xritdecompress_cmd(): def get_xritdecompress_outfile(stdout): """Analyse the output of the xRITDecompress command call and return the file.""" - outfile = b'' + outfile = b"" for line in stdout: try: - k, v = [x.strip() for x in line.split(b':', 1)] + k, v = [x.strip() for x in line.split(b":", 1)] except ValueError: break - if k == b'Decompressed file': + if k == b"Decompressed file": outfile = v break return outfile -def decompress(infile, outdir='.'): +def decompress(infile, outdir="."): """Decompress an XRIT data file and return the path to the decompressed file. It expect to find Eumetsat's xRITDecompress through the environment variable @@ -149,7 +149,7 @@ def decompress(infile, outdir='.'): if not outfile: raise IOError("xrit_decompress '%s', failed, no output file is generated" % infile) - return os.path.join(outdir, outfile.decode('utf-8')) + return os.path.join(outdir, outfile.decode("utf-8")) def get_header_id(fp): @@ -175,20 +175,20 @@ def __init__(self, filename, filename_info, filetype_info, hdr_info): self.mda = {} self.hdr_info = hdr_info self._get_hd(self.hdr_info) - self._start_time = filename_info['start_time'] + self._start_time = filename_info["start_time"] self._end_time = self._start_time + timedelta(minutes=15) def _get_hd(self, hdr_info): """Open the file, read and get the basic file header info and set the mda dictionary.""" hdr_map, variable_length_headers, text_headers = hdr_info - with utils.generic_open(self.filename, mode='rb') as fp: + with utils.generic_open(self.filename, mode="rb") as fp: total_header_length = 16 while fp.tell() < total_header_length: hdr_id = get_header_id(fp) - the_type = hdr_map[hdr_id['hdr_id']] + the_type = hdr_map[hdr_id["hdr_id"]] if the_type in variable_length_headers: - field_length = int((hdr_id['record_length'] - 3) / + field_length = int((hdr_id["record_length"] - 3) / the_type.itemsize) current_hdr = get_header_content(fp, the_type, field_length) key = variable_length_headers[the_type] @@ -199,7 +199,7 @@ def _get_hd(self, hdr_info): else: self.mda[key] = current_hdr elif the_type in text_headers: - field_length = int((hdr_id['record_length'] - 3) / + field_length = int((hdr_id["record_length"] - 3) / the_type.itemsize) char = list(the_type.fields.values())[0][0].char new_type = np.dtype(char + str(field_length)) @@ -210,16 +210,16 @@ def _get_hd(self, hdr_info): self.mda.update( dict(zip(current_hdr.dtype.names, current_hdr))) - total_header_length = self.mda['total_header_length'] + total_header_length = self.mda["total_header_length"] - self.mda.setdefault('number_of_bits_per_pixel', 10) + self.mda.setdefault("number_of_bits_per_pixel", 10) - self.mda['projection_parameters'] = {'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, + self.mda["projection_parameters"] = {"a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, # FIXME: find a reasonable SSP - 'SSP_longitude': 0.0} - self.mda['orbital_parameters'] = {} + "SSP_longitude": 0.0} + self.mda["orbital_parameters"] = {} @property def observation_start_time(self): @@ -247,7 +247,7 @@ def get_dataset(self, key, info): data = self.read_band(key, info) # Convert to xarray - xdata = xr.DataArray(data, dims=['y', 'x']) 
+ xdata = xr.DataArray(data, dims=["y", "x"]) return xdata @@ -282,34 +282,34 @@ def get_area_extent(self, size, offsets, factors, platform_height): def get_area_def(self, dsid): """Get the area definition of the band.""" - cfac = np.int32(self.mda['cfac']) - lfac = np.int32(self.mda['lfac']) - coff = np.float32(self.mda['coff']) - loff = np.float32(self.mda['loff']) - - a = self.mda['projection_parameters']['a'] - b = self.mda['projection_parameters']['b'] - h = self.mda['projection_parameters']['h'] - lon_0 = self.mda['projection_parameters']['SSP_longitude'] - nlines = int(self.mda['number_of_lines']) - ncols = int(self.mda['number_of_columns']) + cfac = np.int32(self.mda["cfac"]) + lfac = np.int32(self.mda["lfac"]) + coff = np.float32(self.mda["coff"]) + loff = np.float32(self.mda["loff"]) + + a = self.mda["projection_parameters"]["a"] + b = self.mda["projection_parameters"]["b"] + h = self.mda["projection_parameters"]["h"] + lon_0 = self.mda["projection_parameters"]["SSP_longitude"] + nlines = int(self.mda["number_of_lines"]) + ncols = int(self.mda["number_of_columns"]) area_extent = self.get_area_extent((nlines, ncols), (loff, coff), (lfac, cfac), h) - proj_dict = {'a': float(a), - 'b': float(b), - 'lon_0': float(lon_0), - 'h': float(h), - 'proj': 'geos', - 'units': 'm'} + proj_dict = {"a": float(a), + "b": float(b), + "lon_0": float(lon_0), + "h": float(h), + "proj": "geos", + "units": "m"} area = geometry.AreaDefinition( - 'some_area_name', + "some_area_name", "On-the-fly area", - 'geosmsg', + "geosmsg", proj_dict, ncols, nlines, @@ -326,14 +326,14 @@ def read_band(self, key, info): dtype=output_dtype) def _get_output_info(self): - bpp = self.mda['number_of_bits_per_pixel'] + bpp = self.mda["number_of_bits_per_pixel"] if bpp in [10, 16]: output_dtype = np.uint16 elif bpp == 8: output_dtype = np.uint8 else: raise ValueError(f"Unexpected number of bits per pixel: {bpp}") - output_shape = (self.mda['number_of_lines'], self.mda['number_of_columns']) + output_shape = (self.mda["number_of_lines"], self.mda["number_of_columns"]) return output_dtype, output_shape @@ -361,12 +361,12 @@ def __init__(self, filename, mda): """Set up the segment.""" self.filename = filename self.mda = mda - self.lines = mda['number_of_lines'] - self.cols = mda['number_of_columns'] - self.bpp = mda['number_of_bits_per_pixel'] - self.compressed = mda['compression_flag_for_data'] == 1 - self.offset = mda['total_header_length'] - self.zipped = os.fspath(filename).endswith('.bz2') + self.lines = mda["number_of_lines"] + self.cols = mda["number_of_columns"] + self.bpp = mda["number_of_bits_per_pixel"] + self.compressed = mda["compression_flag_for_data"] == 1 + self.offset = mda["total_header_length"] + self.zipped = os.fspath(filename).endswith(".bz2") def read_data(self): """Read the data.""" @@ -410,7 +410,7 @@ def _get_input_info(self): total_bits = int(self.lines) * int(self.cols) * int(self.bpp) input_shape = int(np.ceil(total_bits / 8.)) if self.bpp == 16: - input_dtype = '>u2' + input_dtype = ">u2" input_shape //= 2 elif self.bpp in [8, 10]: input_dtype = np.uint8 diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py index 4b06a3d707..c273b9b578 100644 --- a/satpy/readers/hrit_jma.py +++ b/satpy/readers/hrit_jma.py @@ -34,8 +34,8 @@ - `AHI sample data`_ -Example -------- +Example: +-------- Here is an example how to read Himwari-8 HRIT data with Satpy: .. 
code-block:: python @@ -123,33 +123,33 @@ ) from satpy.readers.utils import get_geostationary_mask -logger = logging.getLogger('hrit_jma') +logger = logging.getLogger("hrit_jma") # JMA implementation: -key_header = np.dtype([('key_number', 'u4')]) +key_header = np.dtype([("key_number", "u4")]) -segment_identification = np.dtype([('image_segm_seq_no', '>u1'), - ('total_no_image_segm', '>u1'), - ('line_no_image_segm', '>u2')]) +segment_identification = np.dtype([("image_segm_seq_no", ">u1"), + ("total_no_image_segm", ">u1"), + ("line_no_image_segm", ">u2")]) -encryption_key_message = np.dtype([('station_number', '>u2')]) +encryption_key_message = np.dtype([("station_number", ">u2")]) -image_compensation_information = np.dtype([('compensation', '|S1')]) +image_compensation_information = np.dtype([("compensation", "|S1")]) -image_observation_time = np.dtype([('times', '|S1')]) +image_observation_time = np.dtype([("times", "|S1")]) -image_quality_information = np.dtype([('quality', '|S1')]) +image_quality_information = np.dtype([("quality", "|S1")]) jma_variable_length_headers: dict = {} -jma_text_headers = {image_data_function: 'image_data_function', - annotation_header: 'annotation_header', - ancillary_text: 'ancillary_text', - image_compensation_information: 'image_compensation_information', - image_observation_time: 'image_observation_time', - image_quality_information: 'image_quality_information'} +jma_text_headers = {image_data_function: "image_data_function", + annotation_header: "annotation_header", + ancillary_text: "ancillary_text", + image_compensation_information: "image_compensation_information", + image_observation_time: "image_observation_time", + image_quality_information: "image_quality_information"} jma_hdr_map = base_hdr_map.copy() jma_hdr_map.update({7: key_header, @@ -161,45 +161,45 @@ }) -cuc_time = np.dtype([('coarse', 'u1', (4, )), - ('fine', 'u1', (3, ))]) +cuc_time = np.dtype([("coarse", "u1", (4, )), + ("fine", "u1", (3, ))]) -time_cds_expanded = np.dtype([('days', '>u2'), - ('milliseconds', '>u4'), - ('microseconds', '>u2'), - ('nanoseconds', '>u2')]) +time_cds_expanded = np.dtype([("days", ">u2"), + ("milliseconds", ">u4"), + ("microseconds", ">u2"), + ("nanoseconds", ">u2")]) FULL_DISK = 1 NORTH_HEMIS = 2 SOUTH_HEMIS = 3 UNKNOWN_AREA = -1 -AREA_NAMES = {FULL_DISK: {'short': 'FLDK', 'long': 'Full Disk'}, - NORTH_HEMIS: {'short': 'NH', 'long': 'Northern Hemisphere'}, - SOUTH_HEMIS: {'short': 'SH', 'long': 'Southern Hemisphere'}, - UNKNOWN_AREA: {'short': 'UNKNOWN', 'long': 'Unknown Area'}} - -MTSAT1R = 'MTSAT-1R' -MTSAT2 = 'MTSAT-2' -HIMAWARI8 = 'Himawari-8' -UNKNOWN_PLATFORM = 'Unknown Platform' +AREA_NAMES = {FULL_DISK: {"short": "FLDK", "long": "Full Disk"}, + NORTH_HEMIS: {"short": "NH", "long": "Northern Hemisphere"}, + SOUTH_HEMIS: {"short": "SH", "long": "Southern Hemisphere"}, + UNKNOWN_AREA: {"short": "UNKNOWN", "long": "Unknown Area"}} + +MTSAT1R = "MTSAT-1R" +MTSAT2 = "MTSAT-2" +HIMAWARI8 = "Himawari-8" +UNKNOWN_PLATFORM = "Unknown Platform" PLATFORMS = { - 'GEOS(140.00)': MTSAT1R, - 'GEOS(140.25)': MTSAT1R, - 'GEOS(140.70)': HIMAWARI8, - 'GEOS(145.00)': MTSAT2, + "GEOS(140.00)": MTSAT1R, + "GEOS(140.25)": MTSAT1R, + "GEOS(140.70)": HIMAWARI8, + "GEOS(145.00)": MTSAT2, } SENSORS = { - MTSAT1R: 'jami', - MTSAT2: 'mtsat2_imager', - HIMAWARI8: 'ahi' + MTSAT1R: "jami", + MTSAT2: "mtsat2_imager", + HIMAWARI8: "ahi" } def mjd2datetime64(mjd): """Convert Modified Julian Day (MJD) to datetime64.""" - epoch = np.datetime64('1858-11-17 00:00') + epoch = 
np.datetime64("1858-11-17 00:00") day2usec = 24 * 3600 * 1E6 - mjd_usec = (mjd * day2usec).astype(np.int64).astype('timedelta64[us]') + mjd_usec = (mjd * day2usec).astype(np.int64).astype("timedelta64[us]") return epoch + mjd_usec @@ -242,20 +242,20 @@ def __init__(self, filename, filename_info, filetype_info, use_acquisition_time_ jma_text_headers)) self._use_acquisition_time_as_start_time = use_acquisition_time_as_start_time - self.mda['segment_sequence_number'] = self.mda['image_segm_seq_no'] - self.mda['planned_end_segment_number'] = self.mda['total_no_image_segm'] - self.mda['planned_start_segment_number'] = 1 + self.mda["segment_sequence_number"] = self.mda["image_segm_seq_no"] + self.mda["planned_end_segment_number"] = self.mda["total_no_image_segm"] + self.mda["planned_start_segment_number"] = 1 - items = self.mda['image_data_function'].decode().split('\r') - if items[0].startswith('$HALFTONE'): + items = self.mda["image_data_function"].decode().split("\r") + if items[0].startswith("$HALFTONE"): self.calibration_table = [] for item in items[1:]: - if item == '': + if item == "": continue - key, value = item.split(':=') - if key.startswith('_UNIT'): - self.mda['unit'] = item.split(':=')[1] - elif key.startswith('_NAME'): + key, value = item.split(":=") + if key.startswith("_UNIT"): + self.mda["unit"] = item.split(":=")[1] + elif key.startswith("_NAME"): pass elif key.isdigit(): key = int(key) @@ -264,12 +264,12 @@ def __init__(self, filename, filename_info, filetype_info, use_acquisition_time_ self.calibration_table = np.array(self.calibration_table) - self.projection_name = self.mda['projection_name'].decode().strip() - sublon = float(self.projection_name.split('(')[1][:-1]) - self.mda['projection_parameters']['SSP_longitude'] = sublon + self.projection_name = self.mda["projection_name"].decode().strip() + sublon = float(self.projection_name.split("(")[1][:-1]) + self.mda["projection_parameters"]["SSP_longitude"] = sublon self.platform = self._get_platform() - self.is_segmented = self.mda['segment_sequence_number'] > 0 - self.area_id = filename_info.get('area', UNKNOWN_AREA) + self.is_segmented = self.mda["segment_sequence_number"] > 0 + self.area_id = filename_info.get("area", UNKNOWN_AREA) if self.area_id not in AREA_NAMES: self.area_id = UNKNOWN_AREA self.area = self._get_area_def() @@ -304,7 +304,7 @@ def _get_platform(self): try: return PLATFORMS[self.projection_name] except KeyError: - logger.error('Unable to determine platform: Unknown projection ' + logger.error("Unable to determine platform: Unknown projection " 'name "{}"'.format(self.projection_name)) return UNKNOWN_PLATFORM @@ -320,8 +320,8 @@ def _check_sensor_platform_consistency(self, sensor): """ ref_sensor = SENSORS.get(self.platform, None) if ref_sensor and not sensor == ref_sensor: - logger.error('Sensor-Platform mismatch: {} is not a payload ' - 'of {}. Did you choose the correct reader?' + logger.error("Sensor-Platform mismatch: {} is not a payload " + "of {}. Did you choose the correct reader?" .format(sensor, self.platform)) def _get_line_offset(self): @@ -335,41 +335,41 @@ def _get_line_offset(self): because this is what get_geostationary_area_extent() expects. 
""" # Get line offset from the file - nlines = int(self.mda['number_of_lines']) - loff = np.float32(self.mda['loff']) + nlines = int(self.mda["number_of_lines"]) + loff = np.float32(self.mda["loff"]) # Adapt it to the current segment if self.is_segmented: # loff in the file specifies the offset of the full disk image # centre (1375/2750 for VIS/IR) - segment_number = self.mda['segment_sequence_number'] - 1 - loff -= (self.mda['total_no_image_segm'] - segment_number - 1) * nlines + segment_number = self.mda["segment_sequence_number"] - 1 + loff -= (self.mda["total_no_image_segm"] - segment_number - 1) * nlines elif self.area_id in (NORTH_HEMIS, SOUTH_HEMIS): # loff in the file specifies the start line of the half disk image # in the full disk image loff = nlines - loff elif self.area_id == UNKNOWN_AREA: - logger.error('Cannot compute line offset for unknown area') + logger.error("Cannot compute line offset for unknown area") return loff def _get_area_def(self): """Get the area definition of the band.""" pdict = { - 'cfac': np.int32(self.mda['cfac']), - 'lfac': np.int32(self.mda['lfac']), - 'coff': np.float32(self.mda['coff']), - 'loff': self._get_line_offset(), - 'ncols': int(self.mda['number_of_columns']), - 'nlines': int(self.mda['number_of_lines']), - 'scandir': 'N2S', - 'a': float(self.mda['projection_parameters']['a']), - 'b': float(self.mda['projection_parameters']['b']), - 'h': float(self.mda['projection_parameters']['h']), - 'ssp_lon': float(self.mda['projection_parameters']['SSP_longitude']), - 'a_name': AREA_NAMES[self.area_id]['short'], - 'a_desc': AREA_NAMES[self.area_id]['long'], - 'p_id': 'geosmsg' + "cfac": np.int32(self.mda["cfac"]), + "lfac": np.int32(self.mda["lfac"]), + "coff": np.float32(self.mda["coff"]), + "loff": self._get_line_offset(), + "ncols": int(self.mda["number_of_columns"]), + "nlines": int(self.mda["number_of_lines"]), + "scandir": "N2S", + "a": float(self.mda["projection_parameters"]["a"]), + "b": float(self.mda["projection_parameters"]["b"]), + "h": float(self.mda["projection_parameters"]["h"]), + "ssp_lon": float(self.mda["projection_parameters"]["SSP_longitude"]), + "a_name": AREA_NAMES[self.area_id]["short"], + "a_desc": AREA_NAMES[self.area_id]["long"], + "p_id": "geosmsg" } area_extent = get_area_extent(pdict) return get_area_definition(pdict, area_extent) @@ -385,22 +385,22 @@ def get_dataset(self, key, info): # Filenames of segmented data is identical for MTSAT-1R, MTSAT-2 # and Himawari-8/9. Make sure we have the correct reader for the data # at hand. 
- self._check_sensor_platform_consistency(info['sensor']) + self._check_sensor_platform_consistency(info["sensor"]) # Calibrate and mask space pixels res = self._mask_space(self.calibrate(res, key["calibration"])) # Add scanline acquisition time - res.coords['acq_time'] = ('y', self.acq_time) - res.coords['acq_time'].attrs['long_name'] = 'Scanline acquisition time' + res.coords["acq_time"] = ("y", self.acq_time) + res.coords["acq_time"].attrs["long_name"] = "Scanline acquisition time" # Update attributes res.attrs.update(info) - res.attrs['platform_name'] = self.platform - res.attrs['orbital_parameters'] = { - 'projection_longitude': float(self.mda['projection_parameters']['SSP_longitude']), - 'projection_latitude': 0., - 'projection_altitude': float(self.mda['projection_parameters']['h'])} + res.attrs["platform_name"] = self.platform + res.attrs["orbital_parameters"] = { + "projection_longitude": float(self.mda["projection_parameters"]["SSP_longitude"]), + "projection_latitude": 0., + "projection_altitude": float(self.mda["projection_parameters"]["h"])} return res @@ -419,17 +419,17 @@ def _get_acq_time(self): Missing timestamps in between are computed using linear interpolation. """ - buf_b = np.frombuffer(self.mda['image_observation_time'], + buf_b = np.frombuffer(self.mda["image_observation_time"], dtype=image_observation_time) # Replace \r by \n before encoding, otherwise encoding will drop all # elements except the last one - buf_s = b''.join(buf_b['times']).replace(b'\r', b'\n').decode() + buf_s = b"".join(buf_b["times"]).replace(b"\r", b"\n").decode() # Split into key:=value pairs; then extract line number and timestamp - splits = buf_s.strip().split('\n') - lines_sparse = [int(s.split(':=')[1]) for s in splits[0::2]] - times_sparse = [float(s.split(':=')[1]) for s in splits[1::2]] + splits = buf_s.strip().split("\n") + lines_sparse = [int(s.split(":=")[1]) for s in splits[0::2]] + times_sparse = [float(s.split(":=")[1]) for s in splits[1::2]] if self.platform == HIMAWARI8: # Only a couple of timestamps in the header, and only the first @@ -454,9 +454,9 @@ def calibrate(self, data, calibration): """Calibrate the data.""" tic = datetime.now() - if calibration == 'counts': + if calibration == "counts": return data - if calibration == 'radiance': + if calibration == "radiance": raise NotImplementedError("Can't calibrate to radiance.") cal = self.calibration_table diff --git a/satpy/readers/hrpt.py b/satpy/readers/hrpt.py index cbde23559c..2a54eed664 100644 --- a/satpy/readers/hrpt.py +++ b/satpy/readers/hrpt.py @@ -48,21 +48,21 @@ AVHRR_CHANNEL_NAMES = ("1", "2", "3a", "3b", "4", "5") -dtype = np.dtype([('frame_sync', '>u2', (6, )), - ('id', [('id', '>u2'), - ('spare', '>u2')]), - ('timecode', '>u2', (4, )), - ('telemetry', [("ramp_calibration", '>u2', (5, )), - ("PRT", '>u2', (3, )), - ("ch3_patch_temp", '>u2'), - ("spare", '>u2'), ]), - ('back_scan', '>u2', (10, 3)), - ('space_data', '>u2', (10, 5)), - ('sync', '>u2'), - ('TIP_data', '>u2', (520, )), - ('spare', '>u2', (127, )), - ('image_data', '>u2', (2048, 5)), - ('aux_sync', '>u2', (100, ))]) +dtype = np.dtype([("frame_sync", ">u2", (6, )), + ("id", [("id", ">u2"), + ("spare", ">u2")]), + ("timecode", ">u2", (4, )), + ("telemetry", [("ramp_calibration", ">u2", (5, )), + ("PRT", ">u2", (3, )), + ("ch3_patch_temp", ">u2"), + ("spare", ">u2"), ]), + ("back_scan", ">u2", (10, 3)), + ("space_data", ">u2", (10, 5)), + ("sync", ">u2"), + ("TIP_data", ">u2", (520, )), + ("spare", ">u2", (127, )), + ("image_data", ">u2", (2048, 5)), + 
("aux_sync", ">u2", (100, ))]) def time_seconds(tc_array, year): @@ -78,9 +78,9 @@ def time_seconds(tc_array, year): word = tc_array[:, 3] msecs += word & 1023 return (np.datetime64( - str(year) + '-01-01T00:00:00Z', 's') + - msecs[:].astype('timedelta64[ms]') + - (day - 1)[:].astype('timedelta64[D]')) + str(year) + "-01-01T00:00:00Z", "s") + + msecs[:].astype("timedelta64[ms]") + + (day - 1)[:].astype("timedelta64[D]")) def bfield(array, bit): @@ -111,13 +111,13 @@ def geo_interpolate(lons32km, lats32km): def _get_channel_index(key): """Get the avhrr channel index.""" - avhrr_channel_index = {'1': 0, - '2': 1, - '3a': 2, - '3b': 2, - '4': 3, - '5': 4} - index = avhrr_channel_index[key['name']] + avhrr_channel_index = {"1": 0, + "2": 1, + "3a": 2, + "3b": 2, + "4": 3, + "5": 4} + index = avhrr_channel_index[key["name"]] return index @@ -128,9 +128,9 @@ def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(HRPTFile, self).__init__(filename, filename_info, filetype_info) self.channels = {i: None for i in AVHRR_CHANNEL_NAMES} - self.units = {i: 'counts' for i in AVHRR_CHANNEL_NAMES} + self.units = {i: "counts" for i in AVHRR_CHANNEL_NAMES} - self.year = filename_info.get('start_time', datetime.utcnow()).year + self.year = filename_info.get("start_time", datetime.utcnow()).year @cached_property def times(self): @@ -151,7 +151,7 @@ def read(self): """Read the file.""" with open(self.filename, "rb") as fp_: data = np.memmap(fp_, dtype=dtype, mode="r") - if np.all(np.median(data['frame_sync'], axis=0) > 1024): + if np.all(np.median(data["frame_sync"], axis=0) > 1024): data = self._data.newbyteorder() return data @@ -163,32 +163,32 @@ def platform_name(self): def get_dataset(self, key, info): """Get the dataset.""" attrs = info.copy() - attrs['platform_name'] = self.platform_name + attrs["platform_name"] = self.platform_name - if key['name'] in ['latitude', 'longitude']: + if key["name"] in ["latitude", "longitude"]: data = self._get_navigation_data(key) else: data = self._get_channel_data(key) - result = xr.DataArray(data, dims=['y', 'x'], attrs=attrs) + result = xr.DataArray(data, dims=["y", "x"], attrs=attrs) mask = self._get_ch3_mask_or_true(key) return result.where(mask) def _get_channel_data(self, key): """Get channel data.""" data = da.from_array(self._data["image_data"][:, :, _get_channel_index(key)], chunks=self._chunks) - if key['calibration'] != 'counts': - if key['name'] in ['1', '2', '3a']: + if key["calibration"] != "counts": + if key["name"] in ["1", "2", "3a"]: data = self.calibrate_solar_channel(data, key) - if key['name'] in ['3b', '4', '5']: + if key["name"] in ["3b", "4", "5"]: data = self.calibrate_thermal_channel(data, key) return data def _get_navigation_data(self, key): """Get navigation data.""" lons, lats = self.lons_lats - if key['name'] == 'latitude': + if key["name"] == "latitude": data = da.from_array(lats, chunks=self._chunks) else: data = da.from_array(lons, chunks=self._chunks) @@ -196,9 +196,9 @@ def _get_navigation_data(self, key): def _get_ch3_mask_or_true(self, key): mask = True - if key['name'] == '3a': + if key["name"] == "3a": mask = np.tile(np.logical_not(self._is3b), (2048, 1)).T - elif key['name'] == '3b': + elif key["name"] == "3b": mask = np.tile(self._is3b, (2048, 1)).T return mask @@ -211,7 +211,7 @@ def calibrate_thermal_channel(self, data, key): from pygac.calibration import calibrate_thermal line_numbers = ( np.round((self.times - self.times[-1]) / - np.timedelta64(166666667, 'ns'))).astype(int) + 
np.timedelta64(166666667, "ns"))).astype(int) line_numbers -= line_numbers[0] prt, ict, space = self.telemetry index = _get_channel_index(key) @@ -224,8 +224,8 @@ def calibrate_solar_channel(self, data, key): """Calibrate a solar channel.""" from pygac.calibration import calibrate_solar julian_days = ((np.datetime64(self.start_time) - - np.datetime64(str(self.year) + '-01-01T00:00:00Z')) - / np.timedelta64(1, 'D')) + - np.datetime64(str(self.year) + "-01-01T00:00:00Z")) + / np.timedelta64(1, "D")) data = calibrate_solar(data, _get_channel_index(key), self.year, julian_days, self.calibrator) return data @@ -234,16 +234,16 @@ def calibrate_solar_channel(self, data, key): def calibrator(self): """Create a calibrator for the data.""" from pygac.calibration import Calibrator - pg_spacecraft = ''.join(self.platform_name.split()).lower() + pg_spacecraft = "".join(self.platform_name.split()).lower() return Calibrator(pg_spacecraft) @cached_property def telemetry(self): """Get the telemetry.""" # This isn't converted to dask arrays as it does not work with pygac - prt = np.mean(self._data["telemetry"]['PRT'], axis=1) - ict = np.mean(self._data['back_scan'], axis=1) - space = np.mean(self._data['space_data'][:, :], axis=1) + prt = np.mean(self._data["telemetry"]["PRT"], axis=1) + ict = np.mean(self._data["back_scan"], axis=1) + space = np.mean(self._data["space_data"][:, :], axis=1) return prt, ict, space diff --git a/satpy/readers/hsaf_grib.py b/satpy/readers/hsaf_grib.py index 69361cb648..a041bf0c73 100644 --- a/satpy/readers/hsaf_grib.py +++ b/satpy/readers/hsaf_grib.py @@ -39,7 +39,7 @@ CHUNK_SIZE = get_legacy_chunk_size() CF_UNITS = { - 'none': '1', + "none": "1", } @@ -67,7 +67,7 @@ def __init__(self, filename, filename_info, filetype_info): @staticmethod def _get_datetime(msg): - dtstr = str(msg['dataDate']) + str(msg['dataTime']).zfill(4) + dtstr = str(msg["dataDate"]) + str(msg["dataTime"]).zfill(4) return datetime.strptime(dtstr, "%Y%m%d%H%M") @property @@ -78,19 +78,19 @@ def analysis_time(self): def get_metadata(self, msg): """Get the metadata.""" try: - center_description = msg['centreDescription'] + center_description = msg["centreDescription"] except (RuntimeError, KeyError): center_description = None ds_info = { - 'filename': self.filename, - 'shortName': msg['shortName'], - 'long_name': msg['name'], - 'units': msg['units'], - 'centreDescription': center_description, - 'data_time': self._analysis_time, - 'nx': msg['Nx'], - 'ny': msg['Ny'], - 'projparams': msg.projparams + "filename": self.filename, + "shortName": msg["shortName"], + "long_name": msg["name"], + "units": msg["units"], + "centreDescription": center_description, + "data_time": self._analysis_time, + "nx": msg["Nx"], + "ny": msg["Ny"], + "projparams": msg.projparams } return ds_info @@ -106,28 +106,28 @@ def _get_area_def(self, msg): """Get the area definition of the datasets in the file.""" proj_param = msg.projparams.copy() - Rx = 2 * np.arcsin(1. / msg['NrInRadiusOfEarth']) / msg['dx'] - Ry = 2 * np.arcsin(1. / msg['NrInRadiusOfEarth']) / msg['dy'] + Rx = 2 * np.arcsin(1. / msg["NrInRadiusOfEarth"]) / msg["dx"] + Ry = 2 * np.arcsin(1. 
/ msg["NrInRadiusOfEarth"]) / msg["dy"] - x_0 = - msg['XpInGridLengths'] - x_1 = msg['Nx'] - msg['XpInGridLengths'] - y_0 = (msg['Ny'] - msg['YpInGridLengths']) * -1 - y_1 = msg['YpInGridLengths'] + x_0 = - msg["XpInGridLengths"] + x_1 = msg["Nx"] - msg["XpInGridLengths"] + y_0 = (msg["Ny"] - msg["YpInGridLengths"]) * -1 + y_1 = msg["YpInGridLengths"] - min_x = (x_0 * Rx) * proj_param['h'] - max_x = (x_1 * Rx) * proj_param['h'] + min_x = (x_0 * Rx) * proj_param["h"] + max_x = (x_1 * Rx) * proj_param["h"] - min_y = (y_0 * Ry) * proj_param['h'] - max_y = (y_1 * Ry) * proj_param['h'] + min_y = (y_0 * Ry) * proj_param["h"] + max_y = (y_1 * Ry) * proj_param["h"] area_extent = (min_x, min_y, max_x, max_y) - area = geometry.AreaDefinition('hsaf_region', - 'A region from H-SAF', - 'geos', + area = geometry.AreaDefinition("hsaf_region", + "A region from H-SAF", + "geos", proj_param, - msg['Nx'], - msg['Ny'], + msg["Nx"], + msg["Ny"], area_extent) return area @@ -139,24 +139,24 @@ def _get_message(self, idx): def get_dataset(self, ds_id, ds_info): """Read a GRIB message into an xarray DataArray.""" - if (ds_id['name'] not in self.filename): - raise IOError("File does not contain {} data".format(ds_id['name'])) + if (ds_id["name"] not in self.filename): + raise IOError("File does not contain {} data".format(ds_id["name"])) msg = self._get_message(1) ds_info = self.get_metadata(msg) - ds_info['end_time'] = ds_info['data_time'] + ds_info["end_time"] = ds_info["data_time"] - if (ds_id['name'] == 'h05' or ds_id['name'] == 'h05B'): + if (ds_id["name"] == "h05" or ds_id["name"] == "h05B"): flen = len(self.filename) timedelt = self.filename[flen-10:flen-8] - ds_info['start_time'] = (ds_info['end_time'] - + ds_info["start_time"] = (ds_info["end_time"] - timedelta(hours=int(timedelt))) else: - ds_info['start_time'] = ds_info['end_time'] - fill = msg['missingValue'] + ds_info["start_time"] = ds_info["end_time"] + fill = msg["missingValue"] data = msg.values.astype(np.float32) - if msg.valid_key('jScansPositively') and msg['jScansPositively'] == 1: + if msg.valid_key("jScansPositively") and msg["jScansPositively"] == 1: data = data[::-1] if isinstance(data, np.ma.MaskedArray): @@ -166,4 +166,4 @@ def get_dataset(self, ds_id, ds_info): data[data == fill] = np.nan data = da.from_array(data, chunks=CHUNK_SIZE) - return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) + return xr.DataArray(data, attrs=ds_info, dims=("y", "x")) diff --git a/satpy/readers/hsaf_h5.py b/satpy/readers/hsaf_h5.py index 73be63b29f..478b91ce2d 100644 --- a/satpy/readers/hsaf_h5.py +++ b/satpy/readers/hsaf_h5.py @@ -42,7 +42,7 @@ def __init__(self, filename, filename_info, filetype_info): super(HSAFFileHandler, self).__init__(filename, filename_info, filetype_info) - self._h5fh = h5py.File(self.filename, 'r') + self._h5fh = h5py.File(self.filename, "r") @property def end_time(self): @@ -52,21 +52,21 @@ def end_time(self): @property def start_time(self): """Get start time.""" - return self.filename_info['sensing_time'] + return self.filename_info["sensing_time"] def _prepare_variable_for_palette(self, dset, ds_info): colormap = np.array(dset) - return xr.DataArray(colormap, attrs=ds_info, dims=('idx', 'RGB')) + return xr.DataArray(colormap, attrs=ds_info, dims=("idx", "RGB")) def get_metadata(self, dset, name): """Get the metadata.""" - ds_info = {'name': name} - if name == 'SC': + ds_info = {"name": name} + if name == "SC": ds_info.update({ - 'filename': self.filename, - 'data_time': self.start_time, - 'nx': dset.shape[1], - 'ny': 
dset.shape[0] + "filename": self.filename, + "data_time": self.start_time, + "nx": dset.shape[1], + "ny": dset.shape[0] }) return ds_info @@ -76,7 +76,7 @@ def get_area_def(self, dsid): Since it is not available in the HDF5 message, using hardcoded one (it's known). """ - if dsid['name'] == 'SC': + if dsid["name"] == "SC": return self._get_area_def() raise NotImplementedError @@ -109,31 +109,31 @@ def _get_area_def(self): units: m """ - fd_def = get_area_def('msg_seviri_fes_3km') + fd_def = get_area_def("msg_seviri_fes_3km") hsaf_def = fd_def[AREA_Y_OFFSET:AREA_Y_OFFSET+916, AREA_X_OFFSET:AREA_X_OFFSET+1902] return hsaf_def def _get_dataset(self, ds_name): - if ds_name == 'SC_pal': - _ds_name = 'colormap' + if ds_name == "SC_pal": + _ds_name = "colormap" else: _ds_name = ds_name return self._h5fh.get(_ds_name) def get_dataset(self, ds_id, ds_info): """Read a HDF5 file into an xarray DataArray.""" - ds = self._get_dataset(ds_id['name']) - ds_info = self.get_metadata(ds, ds_id['name']) + ds = self._get_dataset(ds_id["name"]) + ds_info = self.get_metadata(ds, ds_id["name"]) - if ds_id['name'] == 'SC': - ds_info['start_time'] = self.start_time - ds_info['data_time'] = self.start_time - ds_info['end_time'] = self.end_time + if ds_id["name"] == "SC": + ds_info["start_time"] = self.start_time + ds_info["data_time"] = self.start_time + ds_info["end_time"] = self.end_time data = da.from_array(ds, chunks=CHUNK_SIZE) - return xr.DataArray(data, attrs=ds_info, dims=('y', 'x')) + return xr.DataArray(data, attrs=ds_info, dims=("y", "x")) - elif ds_id['name'] == 'SC_pal': + elif ds_id["name"] == "SC_pal": return self._prepare_variable_for_palette(ds, ds_info) diff --git a/satpy/readers/hy2_scat_l2b_h5.py b/satpy/readers/hy2_scat_l2b_h5.py index 64520bae9a..929d7dc934 100644 --- a/satpy/readers/hy2_scat_l2b_h5.py +++ b/satpy/readers/hy2_scat_l2b_h5.py @@ -35,82 +35,82 @@ class HY2SCATL2BH5FileHandler(HDF5FileHandler): @property def start_time(self): """Time for first observation.""" - return datetime.strptime(self['/attr/Range_Beginning_Time'], - '%Y%m%dT%H:%M:%S') + return datetime.strptime(self["/attr/Range_Beginning_Time"], + "%Y%m%dT%H:%M:%S") @property def end_time(self): """Time for final observation.""" - return datetime.strptime(self['/attr/Range_Ending_Time'], - '%Y%m%dT%H:%M:%S') + return datetime.strptime(self["/attr/Range_Ending_Time"], + "%Y%m%dT%H:%M:%S") @property def platform_name(self): """Get the Platform ShortName.""" - return self['/attr/Platform_ShortName'] + return self["/attr/Platform_ShortName"] def get_variable_metadata(self): """Get the variable metadata.""" - info = getattr(self, 'attrs', {}) + info = getattr(self, "attrs", {}) info.update({ - "Equator_Crossing_Longitude": self['/attr/Equator_Crossing_Longitude'], - "Equator_Crossing_Time": self['/attr/Equator_Crossing_Time'], - "Input_L2A_Filename": self['/attr/Input_L2A_Filename'], - "L2B_Actual_WVC_Rows": self['/attr/L2B_Actual_WVC_Rows'], - "Orbit_Inclination": self['/attr/Orbit_Inclination'], - "Orbit_Number": self['/attr/Orbit_Number'], - "Output_L2B_Filename": self['/attr/Output_L2B_Filename'], - "Production_Date_Time": self['/attr/Production_Date_Time'], - "L2B_Expected_WVC_Rows": self['/attr/L2B_Expected_WVC_Rows'] + "Equator_Crossing_Longitude": self["/attr/Equator_Crossing_Longitude"], + "Equator_Crossing_Time": self["/attr/Equator_Crossing_Time"], + "Input_L2A_Filename": self["/attr/Input_L2A_Filename"], + "L2B_Actual_WVC_Rows": self["/attr/L2B_Actual_WVC_Rows"], + "Orbit_Inclination": 
self["/attr/Orbit_Inclination"], + "Orbit_Number": self["/attr/Orbit_Number"], + "Output_L2B_Filename": self["/attr/Output_L2B_Filename"], + "Production_Date_Time": self["/attr/Production_Date_Time"], + "L2B_Expected_WVC_Rows": self["/attr/L2B_Expected_WVC_Rows"] }) try: - info.update({"L2B_Number_WVC_cells": self['/attr/L2B_Number_WVC_cells']}) + info.update({"L2B_Number_WVC_cells": self["/attr/L2B_Number_WVC_cells"]}) except KeyError: - info.update({"L2B_Expected_WVC_Cells": self['/attr/L2B_Expected_WVC_Cells']}) + info.update({"L2B_Expected_WVC_Cells": self["/attr/L2B_Expected_WVC_Cells"]}) return info def get_metadata(self): """Get the metadata.""" - info = getattr(self, 'attrs', {}) + info = getattr(self, "attrs", {}) info.update({ - "WVC_Size": self['/attr/WVC_Size'], - "HDF_Version_Id": self['/attr/HDF_Version_Id'], - "Instrument_ShorName": self['/attr/Instrument_ShorName'], - "L2A_Inputdata_Version": self['/attr/L2A_Inputdata_Version'], - "L2B_Algorithm_Descriptor": self['/attr/L2B_Algorithm_Descriptor'], - "L2B_Data_Version": self['/attr/L2B_Data_Version'], - "L2B_Processing_Type": self['/attr/L2B_Processing_Type'], - "L2B_Processor_Name": self['/attr/L2B_Processor_Name'], - "L2B_Processor_Version": self['/attr/L2B_Processor_Version'], - "Long_Name": self['/attr/Long_Name'], - "Platform_LongName": self['/attr/Platform_LongName'], - "Platform_ShortName": self['/attr/Platform_ShortName'], - "Platform_Type": self['/attr/Platform_Type'], - "Producer_Agency": self['/attr/Producer_Agency'], - "Producer_Institution": self['/attr/Producer_Institution'], - "Rev_Orbit_Perio": self['/attr/Rev_Orbit_Period'], - "Short_Name": self['/attr/Short_Name'], - "Sigma0_Granularity": self['/attr/Sigma0_Granularity'], + "WVC_Size": self["/attr/WVC_Size"], + "HDF_Version_Id": self["/attr/HDF_Version_Id"], + "Instrument_ShorName": self["/attr/Instrument_ShorName"], + "L2A_Inputdata_Version": self["/attr/L2A_Inputdata_Version"], + "L2B_Algorithm_Descriptor": self["/attr/L2B_Algorithm_Descriptor"], + "L2B_Data_Version": self["/attr/L2B_Data_Version"], + "L2B_Processing_Type": self["/attr/L2B_Processing_Type"], + "L2B_Processor_Name": self["/attr/L2B_Processor_Name"], + "L2B_Processor_Version": self["/attr/L2B_Processor_Version"], + "Long_Name": self["/attr/Long_Name"], + "Platform_LongName": self["/attr/Platform_LongName"], + "Platform_ShortName": self["/attr/Platform_ShortName"], + "Platform_Type": self["/attr/Platform_Type"], + "Producer_Agency": self["/attr/Producer_Agency"], + "Producer_Institution": self["/attr/Producer_Institution"], + "Rev_Orbit_Perio": self["/attr/Rev_Orbit_Period"], + "Short_Name": self["/attr/Short_Name"], + "Sigma0_Granularity": self["/attr/Sigma0_Granularity"], }) return info def get_dataset(self, key, info): """Get the dataset.""" - dims = ['y', 'x'] - if self[key['name']].ndim == 3: - dims = ['y', 'x', 'selection'] - data = self[key['name']] + dims = ["y", "x"] + if self[key["name"]].ndim == 3: + dims = ["y", "x", "selection"] + data = self[key["name"]] if "valid range" in data.attrs: - data.attrs.update({'valid_range': data.attrs.pop('valid range')}) - if key['name'] in 'wvc_row_time': - data = data.rename({data.dims[0]: 'y'}) + data.attrs.update({"valid_range": data.attrs.pop("valid range")}) + if key["name"] in "wvc_row_time": + data = data.rename({data.dims[0]: "y"}) else: dim_map = {curr_dim: new_dim for curr_dim, new_dim in zip(data.dims, dims)} data = data.rename(dim_map) data = self._mask_data(data) data = self._scale_data(data) - if key['name'] in 'wvc_lon': + if 
key["name"] in "wvc_lon": _attrs = data.attrs data = xr.where(data > 180, data - 360., data) data.attrs.update(_attrs) @@ -118,17 +118,17 @@ def get_dataset(self, key, info): data.attrs.update(self.get_metadata()) data.attrs.update(self.get_variable_metadata()) if "Platform_ShortName" in data.attrs: - data.attrs.update({'platform_name': data.attrs['Platform_ShortName']}) + data.attrs.update({"platform_name": data.attrs["Platform_ShortName"]}) return data def _scale_data(self, data): - return data * data.attrs['scale_factor'] + data.attrs['add_offset'] + return data * data.attrs["scale_factor"] + data.attrs["add_offset"] def _mask_data(self, data): _attrs = data.attrs - valid_range = data.attrs['valid_range'] - data = xr.where(data == data.attrs['fill_value'], np.nan, data) + valid_range = data.attrs["valid_range"] + data = xr.where(data == data.attrs["fill_value"], np.nan, data) data = xr.where(data < valid_range[0], np.nan, data) data = xr.where(data > valid_range[1], np.nan, data) data.attrs.update(_attrs) diff --git a/satpy/readers/iasi_l2.py b/satpy/readers/iasi_l2.py index 64a060a789..8280416d8b 100644 --- a/satpy/readers/iasi_l2.py +++ b/satpy/readers/iasi_l2.py @@ -45,33 +45,33 @@ # Epoch for the dates EPOCH = dt.datetime(2000, 1, 1) -SHORT_NAMES = {'M01': 'Metop-B', - 'M02': 'Metop-A', - 'M03': 'Metop-C'} - -DSET_NAMES = {'ozone_mixing_ratio': 'O', - 'ozone_mixing_ratio_quality': 'QO', - 'pressure': 'P', - 'pressure_quality': 'QP', - 'temperature': 'T', - 'temperature_quality': 'QT', - 'water_mixing_ratio': 'W', - 'water_mixing_ratio_quality': 'QW', - 'water_total_column': 'WC', - 'ozone_total_column': 'OC', - 'surface_skin_temperature': 'Ts', - 'surface_skin_temperature_quality': 'QTs', - 'emissivity': 'E', - 'emissivity_quality': 'QE'} - -GEO_NAMES = {'latitude': 'Latitude', - 'longitude': 'Longitude', - 'satellite_azimuth_angle': 'SatAzimuth', - 'satellite_zenith_angle': 'SatZenith', - 'sensing_time': {'day': 'SensingTime_day', - 'msec': 'SensingTime_msec'}, - 'solar_azimuth_angle': 'SunAzimuth', - 'solar_zenith_angle': 'SunZenith'} +SHORT_NAMES = {"M01": "Metop-B", + "M02": "Metop-A", + "M03": "Metop-C"} + +DSET_NAMES = {"ozone_mixing_ratio": "O", + "ozone_mixing_ratio_quality": "QO", + "pressure": "P", + "pressure_quality": "QP", + "temperature": "T", + "temperature_quality": "QT", + "water_mixing_ratio": "W", + "water_mixing_ratio_quality": "QW", + "water_total_column": "WC", + "ozone_total_column": "OC", + "surface_skin_temperature": "Ts", + "surface_skin_temperature_quality": "QTs", + "emissivity": "E", + "emissivity_quality": "QE"} + +GEO_NAMES = {"latitude": "Latitude", + "longitude": "Longitude", + "satellite_azimuth_angle": "SatAzimuth", + "satellite_zenith_angle": "SatZenith", + "sensing_time": {"day": "SensingTime_day", + "msec": "SensingTime_msec"}, + "solar_azimuth_angle": "SunAzimuth", + "solar_zenith_angle": "SunZenith"} LOGGER = logging.getLogger(__name__) @@ -88,51 +88,51 @@ def __init__(self, filename, filename_info, filetype_info): self.finfo = filename_info self.lons = None self.lats = None - self.sensor = 'iasi' + self.sensor = "iasi" self.mda = {} - short_name = filename_info['platform_id'] - self.mda['platform_name'] = SHORT_NAMES.get(short_name, short_name) - self.mda['sensor'] = 'iasi' + short_name = filename_info["platform_id"] + self.mda["platform_name"] = SHORT_NAMES.get(short_name, short_name) + self.mda["sensor"] = "iasi" @property def start_time(self): """Get the start time.""" - return self.finfo['start_time'] + return self.finfo["start_time"] 
@property def end_time(self): """Get the end time.""" end_time = dt.datetime.combine(self.start_time.date(), - self.finfo['end_time'].time()) + self.finfo["end_time"].time()) if end_time < self.start_time: end_time += dt.timedelta(days=1) return end_time def get_dataset(self, key, info): """Load a dataset.""" - with h5py.File(self.filename, 'r') as fid: - LOGGER.debug('Reading %s.', key['name']) - if key['name'] in DSET_NAMES: + with h5py.File(self.filename, "r") as fid: + LOGGER.debug("Reading %s.", key["name"]) + if key["name"] in DSET_NAMES: m_data = read_dataset(fid, key) else: m_data = read_geo(fid, key) m_data.attrs.update(info) - m_data.attrs['sensor'] = self.sensor + m_data.attrs["sensor"] = self.sensor return m_data def read_dataset(fid, key): """Read dataset.""" - dsid = DSET_NAMES[key['name']] + dsid = DSET_NAMES[key["name"]] dset = fid["/PWLR/" + dsid] if dset.ndim == 3: - dims = ['y', 'x', 'level'] + dims = ["y", "x", "level"] else: - dims = ['y', 'x'] + dims = ["y", "x"] data = xr.DataArray(da.from_array(dset[()], chunks=CHUNK_SIZE), - name=key['name'], dims=dims).astype(np.float32) + name=key["name"], dims=dims).astype(np.float32) data = xr.where(data > 1e30, np.nan, data) dset_attrs = dict(dset.attrs) @@ -143,9 +143,9 @@ def read_dataset(fid, key): def read_geo(fid, key): """Read geolocation and related datasets.""" - dsid = GEO_NAMES[key['name']] + dsid = GEO_NAMES[key["name"]] add_epoch = False - if "time" in key['name']: + if "time" in key["name"]: days = fid["/L1C/" + dsid["day"]][()] msecs = fid["/L1C/" + dsid["msec"]][()] data = _form_datetimes(days, msecs) @@ -155,10 +155,10 @@ def read_geo(fid, key): data = fid["/L1C/" + dsid][()] dtype = np.float32 data = xr.DataArray(da.from_array(data, chunks=CHUNK_SIZE), - name=key['name'], dims=['y', 'x']).astype(dtype) + name=key["name"], dims=["y", "x"]).astype(dtype) if add_epoch: - data.attrs['sensing_time_epoch'] = EPOCH + data.attrs["sensing_time_epoch"] = EPOCH return data diff --git a/satpy/readers/iasi_l2_so2_bufr.py b/satpy/readers/iasi_l2_so2_bufr.py index 1bb1fbf0e0..b5088aa041 100644 --- a/satpy/readers/iasi_l2_so2_bufr.py +++ b/satpy/readers/iasi_l2_so2_bufr.py @@ -31,8 +31,8 @@ Scene(reader="iasi_l2_so2_bufr", filenames=fnames) -Example -------- +Example: +-------- Here is an example how to read the data in satpy: .. 
code-block:: python @@ -101,9 +101,9 @@ from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size -logger = logging.getLogger('IASIL2SO2BUFR') +logger = logging.getLogger("IASIL2SO2BUFR") CHUNK_SIZE = get_legacy_chunk_size() -data_center_dict = {3: 'METOP-1', 4: 'METOP-2', 5: 'METOP-3'} +data_center_dict = {3: "METOP-1", 4: "METOP-2", 5: "METOP-3"} class IASIL2SO2BUFR(BaseFileHandler): @@ -115,27 +115,27 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): start_time, end_time = self.get_start_end_date() - sc_id = self.get_attribute('satelliteIdentifier') + sc_id = self.get_attribute("satelliteIdentifier") self.metadata = {} - self.metadata['start_time'] = start_time - self.metadata['end_time'] = end_time - self.metadata['SpacecraftName'] = data_center_dict[sc_id] + self.metadata["start_time"] = start_time + self.metadata["end_time"] = end_time + self.metadata["SpacecraftName"] = data_center_dict[sc_id] @property def start_time(self): """Return the start time of data acqusition.""" - return self.metadata['start_time'] + return self.metadata["start_time"] @property def end_time(self): """Return the end time of data acquisition.""" - return self.metadata['end_time'] + return self.metadata["end_time"] @property def platform_name(self): """Return spacecraft name.""" - return '{}'.format(self.metadata['SpacecraftName']) + return "{}".format(self.metadata["SpacecraftName"]) def get_start_end_date(self): """Get the first and last date from the bufr file.""" @@ -146,13 +146,13 @@ def get_start_end_date(self): bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break - ec.codes_set(bufr, 'unpack', 1) - year = ec.codes_get(bufr, 'year') - month = ec.codes_get(bufr, 'month') - day = ec.codes_get(bufr, 'day') - hour = ec.codes_get(bufr, 'hour') - minute = ec.codes_get(bufr, 'minute') - second = ec.codes_get(bufr, 'second') + ec.codes_set(bufr, "unpack", 1) + year = ec.codes_get(bufr, "year") + month = ec.codes_get(bufr, "month") + day = ec.codes_get(bufr, "day") + hour = ec.codes_get(bufr, "hour") + minute = ec.codes_get(bufr, "minute") + second = ec.codes_get(bufr, "second") obs_time = datetime(year=year, month=month, day=day, hour=hour, minute=minute, second=second) @@ -181,7 +181,7 @@ def get_attribute(self, key): bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break - ec.codes_set(bufr, 'unpack', 1) + ec.codes_set(bufr, "unpack", 1) attr = ec.codes_get(bufr, key) ec.codes_release(bufr) @@ -198,7 +198,7 @@ def get_array(self, key): if bufr is None: break - ec.codes_set(bufr, 'unpack', 1) + ec.codes_set(bufr, "unpack", 1) values = ec.codes_get_array( bufr, key, float) @@ -225,12 +225,12 @@ def get_array(self, key): def get_dataset(self, dataset_id, dataset_info): """Get dataset using the BUFR key in dataset_info.""" - arr = self.get_array(dataset_info['key']) - arr[arr == dataset_info['fill_value']] = np.nan + arr = self.get_array(dataset_info["key"]) + arr[arr == dataset_info["fill_value"]] = np.nan - xarr = xr.DataArray(arr, dims=["y", "x"], name=dataset_info['name']) - xarr.attrs['sensor'] = 'IASI' - xarr.attrs['platform_name'] = self.platform_name + xarr = xr.DataArray(arr, dims=["y", "x"], name=dataset_info["name"]) + xarr.attrs["sensor"] = "IASI" + xarr.attrs["platform_name"] = self.platform_name xarr.attrs.update(dataset_info) return xarr diff --git a/satpy/readers/ici_l1b_nc.py b/satpy/readers/ici_l1b_nc.py index a4f15c3c35..d6ebea0c56 100644 --- a/satpy/readers/ici_l1b_nc.py +++ b/satpy/readers/ici_l1b_nc.py @@ 
-65,26 +65,26 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): filename, filename_info, filetype_info, auto_maskandscale=True, ) # Read the variables which are required for the calibration - measurement = 'data/measurement_data' - self._bt_conversion_a = self[f'{measurement}/bt_conversion_a'].values - self._bt_conversion_b = self[f'{measurement}/bt_conversion_b'].values - self._channel_cw = self[f'{measurement}/centre_wavenumber'].values + measurement = "data/measurement_data" + self._bt_conversion_a = self[f"{measurement}/bt_conversion_a"].values + self._bt_conversion_b = self[f"{measurement}/bt_conversion_b"].values + self._channel_cw = self[f"{measurement}/centre_wavenumber"].values self._n_samples = self[measurement].n_samples.size self._filetype_info = filetype_info - self.orthorect = filetype_info.get('orthorect', True) + self.orthorect = filetype_info.get("orthorect", True) @property def start_time(self): """Get observation start time.""" try: start_time = datetime.strptime( - self['/attr/sensing_start_time_utc'], - '%Y%m%d%H%M%S.%f', + self["/attr/sensing_start_time_utc"], + "%Y%m%d%H%M%S.%f", ) except ValueError: start_time = datetime.strptime( - self['/attr/sensing_start_time_utc'], - '%Y-%m-%d %H:%M:%S.%f', + self["/attr/sensing_start_time_utc"], + "%Y-%m-%d %H:%M:%S.%f", ) return start_time @@ -93,25 +93,25 @@ def end_time(self): """Get observation end time.""" try: end_time = datetime.strptime( - self['/attr/sensing_end_time_utc'], - '%Y%m%d%H%M%S.%f', + self["/attr/sensing_end_time_utc"], + "%Y%m%d%H%M%S.%f", ) except ValueError: end_time = datetime.strptime( - self['/attr/sensing_end_time_utc'], - '%Y-%m-%d %H:%M:%S.%f', + self["/attr/sensing_end_time_utc"], + "%Y-%m-%d %H:%M:%S.%f", ) return end_time @property def platform_name(self): """Return platform name.""" - return self['/attr/spacecraft'] + return self["/attr/spacecraft"] @property def sensor(self): """Return sensor.""" - return self['/attr/instrument'] + return self["/attr/instrument"] @property def ssp_lon(self): @@ -208,7 +208,7 @@ def _interpolate_geo( lons_horn, lats_horn = satint.interpolate() lons[:, :, horn] = lons_horn lats[:, :, horn] = lats_horn - dims = ['y', 'x', third_dim_name] + dims = ["y", "x", third_dim_name] lon = xr.DataArray( lons, attrs=longitude.attrs, @@ -258,16 +258,16 @@ def _interpolate( """Interpolate from tie points to pixel points.""" try: if interpolation_type is InterpolationType.SOLAR_ANGLES: - var_key1 = self.filetype_info['solar_azimuth'] - var_key2 = self.filetype_info['solar_zenith'] + var_key1 = self.filetype_info["solar_azimuth"] + var_key2 = self.filetype_info["solar_zenith"] interp_method = self._interpolate_viewing_angle elif interpolation_type is InterpolationType.OBSERVATION_ANGLES: - var_key1 = self.filetype_info['observation_azimuth'] - var_key2 = self.filetype_info['observation_zenith'] + var_key1 = self.filetype_info["observation_azimuth"] + var_key2 = self.filetype_info["observation_zenith"] interp_method = self._interpolate_viewing_angle else: - var_key1 = self.filetype_info['longitude'] - var_key2 = self.filetype_info['latitude'] + var_key1 = self.filetype_info["longitude"] + var_key2 = self.filetype_info["latitude"] interp_method = self._interpolate_geo return interp_method( self[var_key1], @@ -275,7 +275,7 @@ def _interpolate( self._n_samples, ) except KeyError: - logger.warning(f'Datasets for {interpolation_type.name} interpolation not correctly defined in YAML file') # noqa: E501 + logger.warning(f"Datasets for {interpolation_type.name} 
interpolation not correctly defined in YAML file") # noqa: E501 return None, None @staticmethod @@ -308,18 +308,18 @@ def _calibrate(self, variable, dataset_info): original metadata. """ - calibration_name = dataset_info['calibration'] - if calibration_name == 'brightness_temperature': - chan_index = dataset_info['chan_index'] + calibration_name = dataset_info["calibration"] + if calibration_name == "brightness_temperature": + chan_index = dataset_info["chan_index"] cw = self._channel_cw[chan_index] a = self._bt_conversion_a[chan_index] b = self._bt_conversion_b[chan_index] calibrated_variable = self._calibrate_bt(variable, cw, a, b) calibrated_variable.attrs = variable.attrs - elif calibration_name == 'radiance': + elif calibration_name == "radiance": calibrated_variable = variable else: - raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info['name'])) # noqa: E501 + raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info["name"])) # noqa: E501 return calibrated_variable @@ -345,18 +345,18 @@ def _orthorectify(self, variable, orthorect_data_name): orthorect_data = orthorect_data.sel({dim: variable[dim]}) variable += np.degrees(orthorect_data.values / MEAN_EARTH_RADIUS) except KeyError: - logger.warning('Required dataset %s for orthorectification not available, skipping', orthorect_data_name) # noqa: E501 + logger.warning("Required dataset %s for orthorectification not available, skipping", orthorect_data_name) # noqa: E501 return variable @staticmethod def _standardize_dims(variable): """Standardize dims to y, x.""" - if 'n_scan' in variable.dims: - variable = variable.rename({'n_scan': 'y'}) - if 'n_samples' in variable.dims: - variable = variable.rename({'n_samples': 'x'}) - if variable.dims[0] == 'x': - variable = variable.transpose('y', 'x') + if "n_scan" in variable.dims: + variable = variable.rename({"n_scan": "y"}) + if "n_samples" in variable.dims: + variable = variable.rename({"n_samples": "x"}) + if variable.dims[0] == "x": + variable = variable.transpose("y", "x") return variable def _filter_variable(self, variable, dataset_info): @@ -385,12 +385,12 @@ def _get_third_dimension_name(variable): def _fetch_variable(self, var_key): """Fetch variable.""" if var_key in [ - 'longitude', - 'latitude', - 'observation_zenith', - 'observation_azimuth', - 'solar_zenith', - 'solar_azimuth', + "longitude", + "latitude", + "observation_zenith", + "observation_azimuth", + "solar_zenith", + "solar_azimuth", ] and getattr(self, var_key) is not None: variable = getattr(self, var_key).copy() else: @@ -399,18 +399,18 @@ def _fetch_variable(self, var_key): def get_dataset(self, dataset_id, dataset_info): """Get dataset using file_key in dataset_info.""" - var_key = dataset_info['file_key'] - logger.debug(f'Reading in file to get dataset with key {var_key}.') + var_key = dataset_info["file_key"] + logger.debug(f"Reading in file to get dataset with key {var_key}.") try: variable = self._fetch_variable(var_key) except KeyError: - logger.warning(f'Could not find key {var_key} in NetCDF file, no valid Dataset created') # noqa: E501 + logger.warning(f"Could not find key {var_key} in NetCDF file, no valid Dataset created") # noqa: E501 return None variable = self._filter_variable(variable, dataset_info) - if dataset_info.get('calibration') is not None: + if dataset_info.get("calibration") is not None: variable = self._calibrate(variable, dataset_info) if self.orthorect: - orthorect_data_name = dataset_info.get('orthorect_data', None) + 
orthorect_data_name = dataset_info.get("orthorect_data", None) if orthorect_data_name is not None: variable = self._orthorectify(variable, orthorect_data_name) variable = self._manage_attributes(variable, dataset_info) @@ -420,7 +420,7 @@ def get_dataset(self, dataset_id, dataset_info): def _manage_attributes(self, variable, dataset_info): """Manage attributes of the dataset.""" - variable.attrs.setdefault('units', None) + variable.attrs.setdefault("units", None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) return variable @@ -428,21 +428,21 @@ def _manage_attributes(self, variable, dataset_info): def _get_global_attributes(self): """Create a dictionary of global attributes.""" return { - 'filename': self.filename, - 'start_time': self.start_time, - 'end_time': self.end_time, - 'spacecraft_name': self.platform_name, - 'ssp_lon': self.ssp_lon, - 'sensor': self.sensor, - 'filename_start_time': self.filename_info['sensing_start_time'], - 'filename_end_time': self.filename_info['sensing_end_time'], - 'platform_name': self.platform_name, - 'quality_group': self._get_quality_attributes(), + "filename": self.filename, + "start_time": self.start_time, + "end_time": self.end_time, + "spacecraft_name": self.platform_name, + "ssp_lon": self.ssp_lon, + "sensor": self.sensor, + "filename_start_time": self.filename_info["sensing_start_time"], + "filename_end_time": self.filename_info["sensing_end_time"], + "platform_name": self.platform_name, + "quality_group": self._get_quality_attributes(), } def _get_quality_attributes(self): """Get quality attributes.""" - quality_group = self['quality'] + quality_group = self["quality"] quality_dict = {} for key in quality_group: # Add the values (as Numpy array) of each variable in the group diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py index fb0697be45..a7dcf371cc 100644 --- a/satpy/readers/insat3d_img_l1b_h5.py +++ b/satpy/readers/insat3d_img_l1b_h5.py @@ -120,13 +120,13 @@ class Insat3DIMGL1BH5FileHandler(BaseFileHandler): @property def start_time(self): """Get the start time.""" - start_time = datetime.strptime(self.datatree.attrs['Acquisition_Start_Time'], '%d-%b-%YT%H:%M:%S') + start_time = datetime.strptime(self.datatree.attrs["Acquisition_Start_Time"], "%d-%b-%YT%H:%M:%S") return start_time @property def end_time(self): """Get the end time.""" - end_time = datetime.strptime(self.datatree.attrs['Acquisition_End_Time'], '%d-%b-%YT%H:%M:%S') + end_time = datetime.strptime(self.datatree.attrs["Acquisition_End_Time"], "%d-%b-%YT%H:%M:%S") return end_time @cached_property @@ -154,7 +154,7 @@ def get_dataset(self, ds_id, ds_info): darr = ds["IMG_" + ds_id["name"] + calibration] - nlat, nlon = ds.attrs['Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude'] + nlat, nlon = ds.attrs["Nominal_Central_Point_Coordinates(degrees)_Latitude_Longitude"] darr.attrs["orbital_parameters"] = dict(satellite_nominal_longitude=float(nlon), satellite_nominal_latitude=float(nlat), satellite_nominal_altitude=float(ds.attrs["Nominal_Altitude(km)"]), @@ -183,20 +183,20 @@ def get_area_def(self, ds_id): b = 6356752.314245 pdict = { - 'cfac': cfac, - 'lfac': lfac, - 'coff': cols / 2, - 'loff': lines / 2, - 'ncols': cols, - 'nlines': lines, - 'scandir': 'N2S', - 'a': a, - 'b': b, - 'h': h, - 'ssp_lon': 82.0, - 'a_name': "insat3d82", - 'a_desc': "insat3d82", - 'p_id': 'geosmsg' + "cfac": cfac, + "lfac": lfac, + "coff": cols / 2, + "loff": lines / 2, + "ncols": cols, + "nlines": lines, + 
"scandir": "N2S", + "a": a, + "b": b, + "h": h, + "ssp_lon": 82.0, + "a_name": "insat3d82", + "a_desc": "insat3d82", + "p_id": "geosmsg" } area_extent = get_area_extent(pdict) adef = get_area_definition(pdict, area_extent) diff --git a/satpy/readers/li_base_nc.py b/satpy/readers/li_base_nc.py index 57e234b910..848306e77c 100644 --- a/satpy/readers/li_base_nc.py +++ b/satpy/readers/li_base_nc.py @@ -210,14 +210,14 @@ def __init__(self, filename, filename_info, filetype_info, cache_handle=True): # Note: the default dict assignment is need to avoid error when using the fake # netcdf4 file handler in mock unit tests: self._xarray_kwargs = getattr(self, "_xarray_kwargs", {}) - self._xarray_kwargs['decode_times'] = False - self._xarray_kwargs['mask_and_scale'] = False + self._xarray_kwargs["decode_times"] = False + self._xarray_kwargs["mask_and_scale"] = False # Processing level that should be set by derived classes. - self.processing_level = filetype_info.get('processing_level', 'L0') + self.processing_level = filetype_info.get("processing_level", "L0") # This class will only provide support for the LI sensor: - self.sensors = {'li'} + self.sensors = {"li"} # Set of dataset names explicitly provided by this file handler: # This set is required to filter the retrieval of datasets later in the @@ -234,19 +234,19 @@ def __init__(self, filename, filename_info, filetype_info, cache_handle=True): # directly here: self.provided_datasets = set() - self.ds_desc = filetype_info['file_desc'] + self.ds_desc = filetype_info["file_desc"] # Store the extra infos available on specific variables: # Write the correct product type here: - self.product_type = self.ds_desc['product_type'] + self.product_type = self.ds_desc["product_type"] logger.debug("Product type is: %s", self.product_type) - self.variable_transforms = self.ds_desc.get('variable_transforms', {}) + self.variable_transforms = self.ds_desc.get("variable_transforms", {}) # Store the pattern for the default swath coordinates: # Note that we should always have this swath coordinates entry now: - self.swath_coordinates = self.ds_desc.get('swath_coordinates', {}) - patterns = self.swath_coordinates.get('variable_patterns', []) - self.swath_coordinates['patterns'] = [re.compile(pstr) for pstr in patterns] + self.swath_coordinates = self.ds_desc.get("swath_coordinates", {}) + patterns = self.swath_coordinates.get("variable_patterns", []) + self.swath_coordinates["patterns"] = [re.compile(pstr) for pstr in patterns] # check if the current product is in an accumulation grid self.prod_in_accumulation_grid = self.is_prod_in_accumulation_grid() @@ -264,8 +264,8 @@ def __init__(self, filename, filename_info, filetype_info, cache_handle=True): # Ordered list of transform operations supported in this file handler: # those transforms are applied if requested in the 'apply_transforms' method below - self.transform_names = ['use_rescaling', 'seconds_to_timedelta', 'milliseconds_to_timedelta', - 'seconds_to_datetime', 'broadcast_to', 'accumulate_index_offset'] + self.transform_names = ["use_rescaling", "seconds_to_timedelta", "milliseconds_to_timedelta", + "seconds_to_datetime", "broadcast_to", "accumulate_index_offset"] # store internal variables self.internal_variables = {} @@ -276,12 +276,12 @@ def __init__(self, filename, filename_info, filetype_info, cache_handle=True): @property def start_time(self): """Get the start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get the end time.""" 
- return self.filename_info['end_time'] + return self.filename_info["end_time"] @property def sensor_names(self): @@ -290,7 +290,7 @@ def sensor_names(self): def is_prod_in_accumulation_grid(self): """Check if the current product is an accumulated product in geos grid.""" - in_grid = self.swath_coordinates.get('projection', None) == 'mtg_geos_projection' + in_grid = self.swath_coordinates.get("projection", None) == "mtg_geos_projection" return in_grid def get_latlon_names(self): @@ -298,14 +298,14 @@ def get_latlon_names(self): Use default 'latitude' / 'longitude' if not specified. """ - lon_name = self.swath_coordinates.setdefault('longitude', 'longitude') - lat_name = self.swath_coordinates.setdefault('latitude', 'latitude') + lon_name = self.swath_coordinates.setdefault("longitude", "longitude") + lat_name = self.swath_coordinates.setdefault("latitude", "latitude") return lat_name, lon_name def get_projection_config(self): """Retrieve the projection configuration details.""" # We retrieve the projection variable name directly from our swath settings: - proj_var = self.swath_coordinates['projection'] + proj_var = self.swath_coordinates["projection"] geos_proj = self.get_measured_variable(proj_var, fill_value=None) # cast projection attributes to float/str: @@ -317,12 +317,12 @@ def get_projection_config(self): sweep = str(geos_proj.attrs["sweep_angle_axis"]) # use a (semi-major axis) and rf (reverse flattening) to define ellipsoid as recommended by EUM - proj_dict = {'a': major_axis, - 'lon_0': lon_0, - 'h': point_height, + proj_dict = {"a": major_axis, + "lon_0": lon_0, + "h": point_height, "rf": inv_flattening, - 'proj': 'geos', - 'units': 'm', + "proj": "geos", + "units": "m", "sweep": sweep} return proj_dict @@ -330,10 +330,10 @@ def get_projection_config(self): def get_daskified_lon_lat(self, proj_dict): """Get daskified lon and lat array using map_blocks.""" # Get our azimuth/elevation arrays, - azimuth = self.get_measured_variable(self.swath_coordinates['azimuth']) + azimuth = self.get_measured_variable(self.swath_coordinates["azimuth"]) azimuth = self.apply_use_rescaling(azimuth) - elevation = self.get_measured_variable(self.swath_coordinates['elevation']) + elevation = self.get_measured_variable(self.swath_coordinates["elevation"]) elevation = self.apply_use_rescaling(elevation) # Daskify inverse projection computation: @@ -355,9 +355,9 @@ def generate_coords_from_scan_angles(self): # Finally, we should store those arrays as internal variables for later retrieval as # standard datasets: self.internal_variables[lon_name] = xr.DataArray( - da.asarray(lon), dims=['y'], attrs={'standard_name': 'longitude'}) + da.asarray(lon), dims=["y"], attrs={"standard_name": "longitude"}) self.internal_variables[lat_name] = xr.DataArray( - da.asarray(lat), dims=['y'], attrs={'standard_name': 'latitude'}) + da.asarray(lat), dims=["y"], attrs={"standard_name": "latitude"}) def inverse_projection(self, azimuth, elevation, proj_dict): """Compute inverse projection.""" @@ -365,7 +365,7 @@ def inverse_projection(self, azimuth, elevation, proj_dict): projection = Proj(proj_dict) # Retrieve the point height from the projection config: - point_height = proj_dict['h'] + point_height = proj_dict["h"] # Convert scan angles to projection coordinates by multiplying with perspective point height azimuth = azimuth.values * point_height @@ -444,7 +444,7 @@ def apply_fill_value(self, arr, fill_value): if fill_value is not None: if np.isnan(fill_value): fill_value = np.float32(np.nan) - arr = arr.where(arr != 
arr.attrs.get('_FillValue'), fill_value) + arr = arr.where(arr != arr.attrs.get("_FillValue"), fill_value) return arr def get_variable_search_paths(self, var_paths): @@ -461,25 +461,25 @@ def add_provided_dataset(self, ds_infos): """Add a provided dataset to our internal list.""" # Check if we have extra infos for that variable: # Note that if available we should use the alias name instead here: - vname = ds_infos["alias_name"] if 'alias_name' in ds_infos else ds_infos["variable_name"] + vname = ds_infos["alias_name"] if "alias_name" in ds_infos else ds_infos["variable_name"] self.check_variable_extra_info(ds_infos, vname) # We check here if we should include the default coordinates on that dataset: - if self.swath_coordinates is not None and 'coordinates' not in ds_infos: + if self.swath_coordinates is not None and "coordinates" not in ds_infos: # Check if the variable corresponding to this dataset will match one of the valid patterns # for the swath usage: - if any([p.search(vname) is not None for p in self.swath_coordinates['patterns']]): + if any([p.search(vname) is not None for p in self.swath_coordinates["patterns"]]): # Get the target coordinate names, applying the sector name as needed: lat_coord_name, lon_coord_name = self.get_coordinate_names(ds_infos) # Ensure we do not try to add the coordinates on the coordinates themself: - dname = ds_infos['name'] + dname = ds_infos["name"] if dname != lat_coord_name and dname != lon_coord_name: - ds_infos['coordinates'] = [lon_coord_name, lat_coord_name] + ds_infos["coordinates"] = [lon_coord_name, lat_coord_name] self.dataset_infos.append(ds_infos) - self.provided_datasets.add(ds_infos['name']) + self.provided_datasets.add(ds_infos["name"]) def check_variable_extra_info(self, ds_infos, vname): """Check if we have extra infos for that variable.""" @@ -492,8 +492,8 @@ def check_variable_extra_info(self, ds_infos, vname): def get_coordinate_names(self, ds_infos): """Get the target coordinate names, applying the sector name as needed.""" lat_coord_name, lon_coord_name = self.get_latlon_names() - if 'sector_name' in ds_infos: - sname = ds_infos['sector_name'] + if "sector_name" in ds_infos: + sname = ds_infos["sector_name"] lat_coord_name = lat_coord_name.replace("{sector_name}", sname) lon_coord_name = lon_coord_name.replace("{sector_name}", sname) return lat_coord_name, lon_coord_name @@ -501,7 +501,7 @@ def get_coordinate_names(self, ds_infos): def get_dataset_infos(self, dname): """Retrieve the dataset infos corresponding to one of the registered datasets.""" for dsinfos in self.dataset_infos: - if dsinfos['name'] == dname: + if dsinfos["name"] == dname: return dsinfos # nothing found. 
@@ -514,15 +514,15 @@ def register_dataset(self, var_name, oc_name=None): ds_name = var_name if oc_name is None else f"{var_name}_{oc_name}_sector" ds_info = { - 'name': ds_name, - 'variable_name': var_name, - 'sensor': 'li', - 'file_type': self.filetype_info['file_type'] + "name": ds_name, + "variable_name": var_name, + "sensor": "li", + "file_type": self.filetype_info["file_type"] } # add the sector name: if oc_name is not None: - ds_info['sector_name'] = oc_name + ds_info["sector_name"] = oc_name self.add_provided_dataset(ds_info) @@ -535,7 +535,7 @@ def register_available_datasets(self): self.dataset_infos = [] # Assign the search paths for this product type: - self.search_paths = self.ds_desc.get('search_paths', []) + self.search_paths = self.ds_desc.get("search_paths", []) # Register our coordinates from azimuth/elevation data # if the product is accumulated @@ -553,17 +553,17 @@ def register_variable_datasets(self): """Register all the available raw (i.e. not in sectors).""" - if 'variables' in self.ds_desc: - all_vars = self.ds_desc['variables'] + if "variables" in self.ds_desc: + all_vars = self.ds_desc["variables"] # No sector to handle so we write simple datasets from the variables: for var_name in all_vars: self.register_dataset(var_name) def register_sector_datasets(self): """Register all the available sector datasets.""" - if 'sectors' in self.ds_desc: - sectors = self.ds_desc['sectors'] - sector_vars = self.ds_desc['sector_variables'] + if "sectors" in self.ds_desc: + sectors = self.ds_desc["sectors"] + sector_vars = self.ds_desc["sector_variables"] # We should generate the datasets per sector: for oc_name in sectors: for var_name in sector_vars: @@ -590,16 +590,16 @@ def apply_use_rescaling(self, data_array, ds_info=None): # Check if we have the scaling elements: attribs = data_array.attrs - if 'scale_factor' in attribs or 'scaling_factor' in attribs or 'add_offset' in attribs: + if "scale_factor" in attribs or "scaling_factor" in attribs or "add_offset" in attribs: # TODO remove scaling_factor fallback after issue in NetCDF is fixed - scale_factor = attribs.setdefault('scale_factor', attribs.get('scaling_factor', 1)) - add_offset = attribs.setdefault('add_offset', 0) + scale_factor = attribs.setdefault("scale_factor", attribs.get("scaling_factor", 1)) + add_offset = attribs.setdefault("add_offset", 0) data_array = (data_array * scale_factor) + add_offset # rescale the valid range accordingly - if 'valid_range' in attribs.keys(): - attribs['valid_range'] = attribs['valid_range'] * scale_factor + add_offset + if "valid_range" in attribs.keys(): + attribs["valid_range"] = attribs["valid_range"] * scale_factor + add_offset data_array.attrs.update(attribs) @@ -607,11 +607,11 @@ def apply_broadcast_to(self, data_array, ds_info): """Apply the broadcast_to transform on a given array.""" - ref_var = self.get_transform_reference('broadcast_to', ds_info) + ref_var = self.get_transform_reference("broadcast_to", ds_info) - logger.debug("Broascasting %s to shape %s", ds_info['name'], ref_var.shape) + logger.debug("Broadcasting %s to shape %s", ds_info["name"], ref_var.shape) new_array = da.broadcast_to(data_array, ref_var.shape) - dims = data_array.dims if data_array.ndim > 0 else ('y',) + dims = data_array.dims if data_array.ndim > 0 else ("y",) data_array = xr.DataArray(new_array, coords=data_array.coords, dims=dims, name=data_array.name, attrs=data_array.attrs) return data_array @@ -621,42 
+621,42 @@ def apply_accumulate_index_offset(self, data_array, ds_info): # retrieve the __index_offset here, or create it if missing: # And keep track of the shared ds_info dict to reset it later in combine_info() self.current_ds_info = ds_info - offset = ds_info.setdefault('__index_offset', 0) + offset = ds_info.setdefault("__index_offset", 0) - ref_var = self.get_transform_reference('accumulate_index_offset', ds_info) + ref_var = self.get_transform_reference("accumulate_index_offset", ds_info) # Apply the current index_offset already reached on the indices we have in the current dataset: data_array = data_array + offset # Now update the __index_offset adding the number of elements in the reference array: - ds_info['__index_offset'] = offset + ref_var.size + ds_info["__index_offset"] = offset + ref_var.size logger.debug("Adding %d elements for index offset, new value is: %d", - ref_var.size, ds_info['__index_offset']) + ref_var.size, ds_info["__index_offset"]) return data_array def apply_seconds_to_datetime(self, data_array, ds_info): """Apply the seconds_to_datetime transform on a given array.""" # Retrieve the epoch timestamp: - epoch_ts = np.datetime64('2000-01-01T00:00:00.000000') + epoch_ts = np.datetime64("2000-01-01T00:00:00.000000") # And add our values as delta times in seconds: # note that we use a resolution of 1ns here: - data_array = epoch_ts + (data_array * 1e9).astype('timedelta64[ns]') + data_array = epoch_ts + (data_array * 1e9).astype("timedelta64[ns]") return data_array def apply_seconds_to_timedelta(self, data_array, _ds_info): """Apply the seconds_to_timedelta transform on a given array.""" # Apply the type conversion in place in the data_array: # note that we use a resolution of 1ns here: - data_array = (data_array * 1e9).astype('timedelta64[ns]') + data_array = (data_array * 1e9).astype("timedelta64[ns]") return data_array def apply_milliseconds_to_timedelta(self, data_array, _ds_info): """Apply the milliseconds_to_timedelta transform on a given array.""" # Apply the type conversion in place in the data_array: # note that we use a resolution of 1ns here: - data_array = (data_array * 1e6).astype('timedelta64[ns]') + data_array = (data_array * 1e6).astype("timedelta64[ns]") return data_array def get_transform_reference(self, transform_name, ds_info): @@ -665,7 +665,7 @@ def get_transform_reference(self, transform_name, ds_info): if "{sector_name}" in var_path: # We really expect to have a sector name for that variable: - var_path = var_path.replace("{sector_name}", ds_info['sector_name']) + var_path = var_path.replace("{sector_name}", ds_info["sector_name"]) # get the variable on that path: ref_var = self.get_measured_variable(var_path) @@ -679,7 +679,7 @@ def apply_transforms(self, data_array, ds_info): for tname in self.transform_names: if tname in ds_info: # Retrieve the transform function: - transform = getattr(self, f'apply_{tname}') + transform = getattr(self, f"apply_{tname}") # Apply the transformation on the dataset: data_array = transform(data_array, ds_info) return data_array @@ -690,7 +690,7 @@ def combine_info(self, all_infos): This is to be able to reset our __index_offset attribute in the shared ds_info currently being updated. 
""" if self.current_ds_info is not None: - del self.current_ds_info['__index_offset'] + del self.current_ds_info["__index_offset"] self.current_ds_info = None return super().combine_info(all_infos) @@ -698,10 +698,10 @@ def combine_info(self, all_infos): def get_transformed_dataset(self, ds_info): """Retrieve a dataset with all transformations applied on it.""" # Extract base variable name: - vname = ds_info['variable_name'] + vname = ds_info["variable_name"] # Note that the sector name might be None below: - sname = ds_info.get('sector_name', None) + sname = ds_info.get("sector_name", None) # Use the sector name as prefix for the variable path if applicable: var_paths = vname if sname is None else f"{sname}/{vname}" @@ -717,17 +717,17 @@ def validate_array_dimensions(self, data_array, ds_info=None): # in order to potentially support data array combination in a satpy scene: if data_array.ndim == 0: # If we have no dimension, we should force creating one here: - data_array = data_array.expand_dims({'y': 1}) + data_array = data_array.expand_dims({"y": 1}) - data_array = data_array.rename({data_array.dims[0]: 'y'}) + data_array = data_array.rename({data_array.dims[0]: "y"}) return data_array def update_array_attributes(self, data_array, ds_info): """Inject the attributes from the ds_info structure into the final data array, ignoring the internal entries.""" # ignore some internal processing only entries: - ignored_attribs = ["__index_offset", "broadcast_to", 'accumulate_index_offset', - 'seconds_to_timedelta', 'seconds_to_datetime'] + ignored_attribs = ["__index_offset", "broadcast_to", "accumulate_index_offset", + "seconds_to_timedelta", "seconds_to_datetime"] for key, value in ds_info.items(): if key not in ignored_attribs: data_array.attrs[key] = value @@ -738,13 +738,13 @@ def get_dataset(self, dataset_id, ds_info=None): """Get a dataset.""" # Retrieve default infos if missing: if ds_info is None: - ds_info = self.get_dataset_infos(dataset_id['name']) + ds_info = self.get_dataset_infos(dataset_id["name"]) # check for potential error: if ds_info is None: raise KeyError(f"No dataset registered for {dataset_id}") - ds_name = ds_info['name'] + ds_name = ds_info["name"] # In case this dataset name is not explicitly provided by this file handler then we # should simply return None. 
if ds_name not in self.provided_datasets: diff --git a/satpy/readers/li_l2_nc.py b/satpy/readers/li_l2_nc.py index 569dc2bf51..4fe0826380 100644 --- a/satpy/readers/li_l2_nc.py +++ b/satpy/readers/li_l2_nc.py @@ -71,14 +71,14 @@ def get_area_def(self, dsid): """Compute area definition for a dataset, only supported for accumulated products.""" var_with_swath_coord = self.is_var_with_swath_coord(dsid) if var_with_swath_coord and self.with_area_def: - return get_area_def('mtg_fci_fdss_2km') + return get_area_def("mtg_fci_fdss_2km") - raise NotImplementedError('Area definition is not supported for accumulated products.') + raise NotImplementedError("Area definition is not supported for accumulated products.") def is_var_with_swath_coord(self, dsid): """Check if the variable corresponding to this dataset is listed as variable with swath coordinates.""" # since the patterns are compiled to regex we use the search() method below to find matches - with_swath_coords = any([p.search(dsid['name']) is not None for p in self.swath_coordinates['patterns']]) + with_swath_coords = any([p.search(dsid["name"]) is not None for p in self.swath_coordinates["patterns"]]) return with_swath_coords def get_array_on_fci_grid(self, data_array: xr.DataArray): @@ -92,8 +92,8 @@ def get_array_on_fci_grid(self, data_array: xr.DataArray): # Note that x and y have origin in the south-west corner of the image # and start with index 1. - rows = self.get_measured_variable('y') - cols = self.get_measured_variable('x') + rows = self.get_measured_variable("y") + cols = self.get_measured_variable("x") attrs = data_array.attrs rows, cols = da.compute(rows, cols) @@ -110,7 +110,7 @@ def get_array_on_fci_grid(self, data_array: xr.DataArray): flattened_result[rows * LI_GRID_SHAPE[0] + cols] = data_array # ... reshape to final 2D grid data_2d = da.reshape(flattened_result, LI_GRID_SHAPE) - xarr = xr.DataArray(da.asarray(data_2d, CHUNK_SIZE), dims=('y', 'x')) + xarr = xr.DataArray(da.asarray(data_2d, CHUNK_SIZE), dims=("y", "x")) xarr.attrs = attrs return xarr diff --git a/satpy/readers/maia.py b/satpy/readers/maia.py index 941bf34208..75591c59d5 100644 --- a/satpy/readers/maia.py +++ b/satpy/readers/maia.py @@ -48,69 +48,69 @@ def __init__(self, filename, filename_info, filetype_info): filename, filename_info, filetype_info) self.finfo = filename_info # set the day date part for end_time from the file name - self.finfo['end_time'] = self.finfo['end_time'].replace( - year=self.finfo['start_time'].year, - month=self.finfo['start_time'].month, - day=self.finfo['start_time'].day) - if self.finfo['end_time'] < self.finfo['start_time']: - myday = self.finfo['end_time'].day - self.finfo['end_time'] = self.finfo['end_time'].replace( + self.finfo["end_time"] = self.finfo["end_time"].replace( + year=self.finfo["start_time"].year, + month=self.finfo["start_time"].month, + day=self.finfo["start_time"].day) + if self.finfo["end_time"] < self.finfo["start_time"]: + myday = self.finfo["end_time"].day + self.finfo["end_time"] = self.finfo["end_time"].replace( day=myday + 1) self.selected = None self.read(self.filename) def read(self, filename): """Read the file.""" - self.h5 = h5py.File(filename, 'r') + self.h5 = h5py.File(filename, "r") missing = -9999. - self.Lat = da.from_array(self.h5[u'DATA/Latitude'], chunks=CHUNK_SIZE) / 10000. - self.Lon = da.from_array(self.h5[u'DATA/Longitude'], chunks=CHUNK_SIZE) / 10000. + self.Lat = da.from_array(self.h5[u"DATA/Latitude"], chunks=CHUNK_SIZE) / 10000. 
+ self.Lon = da.from_array(self.h5[u"DATA/Longitude"], chunks=CHUNK_SIZE) / 10000. self.selected = (self.Lon > missing) self.file_content = {} - for key in self.h5['DATA'].keys(): - self.file_content[key] = da.from_array(self.h5[u'DATA/' + key], chunks=CHUNK_SIZE) - for key in self.h5[u'HEADER'].keys(): - self.file_content[key] = self.h5[u'HEADER/' + key][:] + for key in self.h5["DATA"].keys(): + self.file_content[key] = da.from_array(self.h5[u"DATA/" + key], chunks=CHUNK_SIZE) + for key in self.h5[u"HEADER"].keys(): + self.file_content[key] = self.h5[u"HEADER/" + key][:] # Cloud Mask on pixel mask = 2**0 + 2**1 + 2**2 - lst = self.file_content[u'CloudMask'] & mask + lst = self.file_content[u"CloudMask"] & mask lst = lst / 2**0 self.file_content[u"cma"] = lst # Cloud Mask confidence mask = 2**5 + 2**6 - lst = self.file_content[u'CloudMask'] & mask + lst = self.file_content[u"CloudMask"] & mask lst = lst / 2**5 self.file_content[u"cma_conf"] = lst # Cloud Mask Quality mask = 2**3 + 2**4 - lst = self.file_content[u'CloudMask'] & mask + lst = self.file_content[u"CloudMask"] & mask lst = lst / 2**3 - self.file_content[u'cma_qual'] = lst + self.file_content[u"cma_qual"] = lst # Opaque Cloud mask = 2**21 - lst = self.file_content[u'CloudMask'] & mask + lst = self.file_content[u"CloudMask"] & mask lst = lst / 2**21 - self.file_content[u'opaq_cloud'] = lst + self.file_content[u"opaq_cloud"] = lst # land /water Background mask = 2**15 + 2**16 + 2**17 - lst = self.file_content[u'CloudMask'] & mask + lst = self.file_content[u"CloudMask"] & mask lst = lst / 2**15 - self.file_content[u'land_water_background'] = lst + self.file_content[u"land_water_background"] = lst # CT (Actual CloudType) mask = 2**4 + 2**5 + 2**6 + 2**7 + 2**8 - classif = self.file_content[u'CloudType'] & mask + classif = self.file_content[u"CloudType"] & mask classif = classif / 2**4 - self.file_content['ct'] = classif.astype(np.uint8) + self.file_content["ct"] = classif.astype(np.uint8) def get_platform(self, platform): """Get the platform.""" - if self.file_content['sat_id'] in (14,): + if self.file_content["sat_id"] in (14,): return "viirs" else: return "avhrr" @@ -118,26 +118,26 @@ def get_platform(self, platform): @property def start_time(self): """Get the start time.""" - return self.finfo['start_time'] + return self.finfo["start_time"] @property def end_time(self): """Get the end time.""" - return self.finfo['end_time'] + return self.finfo["end_time"] def get_dataset(self, key, info, out=None): """Get a dataset from the file.""" - logger.debug("Reading %s.", key['name']) - values = self.file_content[key['name']] + logger.debug("Reading %s.", key["name"]) + values = self.file_content[key["name"]] selected = np.array(self.selected) - if key['name'] in ("Latitude", "Longitude"): + if key["name"] in ("Latitude", "Longitude"): values = values / 10000. - if key['name'] in ('Tsurf', 'CloudTopPres', 'CloudTopTemp'): + if key["name"] in ("Tsurf", "CloudTopPres", "CloudTopTemp"): goods = values > -9998. selected = np.array(selected & goods) - if key['name'] in ('Tsurf', "Alt_surface", "CloudTopTemp"): + if key["name"] in ("Tsurf", "Alt_surface", "CloudTopTemp"): values = values / 100. - if key['name'] in ("CloudTopPres"): + if key["name"] in ("CloudTopPres"): values = values / 10. 
else: selected = self.selected @@ -145,10 +145,10 @@ def get_dataset(self, key, info, out=None): fill_value = np.nan - if key['name'] == 'ct': + if key["name"] == "ct": fill_value = 0 - info['_FillValue'] = 0 - ds = DataArray(values, dims=['y', 'x'], attrs=info).where(selected, fill_value) + info["_FillValue"] = 0 + ds = DataArray(values, dims=["y", "x"], attrs=info).where(selected, fill_value) # update dataset info with file_info return ds diff --git a/satpy/readers/meris_nc_sen3.py b/satpy/readers/meris_nc_sen3.py index 61fc761f50..fa69dad2cc 100644 --- a/satpy/readers/meris_nc_sen3.py +++ b/satpy/readers/meris_nc_sen3.py @@ -40,7 +40,7 @@ class NCMERISCal(NCOLCIBase): def __init__(self, filename, filename_info, filetype_info): """Init the meris reader base.""" super(NCMERISCal, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'meris' + self.sensor = "meris" class NCMERISGeo(NCOLCIBase): @@ -49,7 +49,7 @@ class NCMERISGeo(NCOLCIBase): def __init__(self, filename, filename_info, filetype_info): """Init the meris reader base.""" super(NCMERISGeo, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'meris' + self.sensor = "meris" class NCMERIS2(NCOLCI2): @@ -58,24 +58,24 @@ class NCMERIS2(NCOLCI2): def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(NCMERIS2, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'meris' - self.reflectance_prefix = 'M' - self.reflectance_suffix = '_rho_w' + self.sensor = "meris" + self.reflectance_prefix = "M" + self.reflectance_suffix = "_rho_w" def getbitmask(self, wqsf, items=None): """Get the bitmask. Experimental default mask.""" - items = items or ['SEA_ICE', 'MEGLINT', 'HIGHGLINT', - 'HAZE_OVER_WATER', 'WHITECAPS', 'AC_FAIL', 'WHITE_SCATT', - 'LOWRW', 'HIGHRW', 'OUT_OF_RANGE_AAC', 'OUT_OF_SCOPE_AAC', - 'OUT_OF_RANGE_OC_NN', 'OUT_OF_SCOPE_OC_NN', - 'OUT_OF_RANGE_CHL_OC4ME_INPUT', 'OUT_OF_RANGE_CHL_OC4ME'] + items = items or ["SEA_ICE", "MEGLINT", "HIGHGLINT", + "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "WHITE_SCATT", + "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC", + "OUT_OF_RANGE_OC_NN", "OUT_OF_SCOPE_OC_NN", + "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"] bflags = BitFlags( wqsf, - flag_list=['SEA_ICE', 'MEGLINT', 'HIGHGLINT', 'CASE2_S', 'CASE2_ANOM', - 'HAZE_OVER_WATER', 'WHITECAPS', 'AC_FAIL', 'BPAC_ON', 'WHITE_SCATT', - 'LOWRW', 'HIGHRW', 'OUT_OF_RANGE_AAC', 'OUT_OF_SCOPE_AAC', - 'OUT_OF_RANGE_OC_NN', 'OUT_OF_SCOPE_OC_NN', - 'OUT_OF_RANGE_CHL_OC4ME_INPUT', 'OUT_OF_RANGE_CHL_OC4ME'], + flag_list=["SEA_ICE", "MEGLINT", "HIGHGLINT", "CASE2_S", "CASE2_ANOM", + "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "BPAC_ON", "WHITE_SCATT", + "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC", + "OUT_OF_RANGE_OC_NN", "OUT_OF_SCOPE_OC_NN", + "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"], ) return reduce(np.logical_or, [bflags[item] for item in items]) @@ -86,7 +86,7 @@ class NCMERISAngles(NCOLCIAngles): def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(NCMERISAngles, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'meris' + self.sensor = "meris" class NCMERISMeteo(NCOLCIMeteo): @@ -95,4 +95,4 @@ class NCMERISMeteo(NCOLCIMeteo): def __init__(self, filename, filename_info, filetype_info): """Init the file handler.""" super(NCMERISMeteo, self).__init__(filename, filename_info, filetype_info) - self.sensor = 'meris' + self.sensor = "meris" diff --git 
a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index b0225ebcb4..905db0654f 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -44,25 +44,25 @@ def _strptime(self, date_attr, time_attr): time = self[time_attr] # "18:27:39.720" # cuts off microseconds because of unknown meaning # is .720 == 720 microseconds or 720000 microseconds - return datetime.strptime(date + " " + time.split('.')[0], "%Y-%m-%d %H:%M:%S") + return datetime.strptime(date + " " + time.split(".")[0], "%Y-%m-%d %H:%M:%S") @property def start_time(self): """Time for first observation.""" - return self._strptime('/attr/Observing Beginning Date', '/attr/Observing Beginning Time') + return self._strptime("/attr/Observing Beginning Date", "/attr/Observing Beginning Time") @property def end_time(self): """Time for final observation.""" - return self._strptime('/attr/Observing Ending Date', '/attr/Observing Ending Time') + return self._strptime("/attr/Observing Ending Date", "/attr/Observing Ending Time") @property def sensor_name(self): """Map sensor name to Satpy 'standard' sensor names.""" - file_sensor = self['/attr/Sensor Identification Code'] + file_sensor = self["/attr/Sensor Identification Code"] sensor = { - 'MERSI': 'mersi-2', - 'MERSI LL': 'mersi-ll', + "MERSI": "mersi-2", + "MERSI LL": "mersi-ll", }.get(file_sensor, file_sensor) return sensor @@ -76,8 +76,8 @@ def _get_single_slope_intercept(self, slope, intercept, cal_index): def _get_coefficients(self, cal_key, cal_index): coeffs = self[cal_key][cal_index] - slope = coeffs.attrs.pop('Slope', None) - intercept = coeffs.attrs.pop('Intercept', None) + slope = coeffs.attrs.pop("Slope", None) + intercept = coeffs.attrs.pop("Intercept", None) if slope is not None: slope, intercept = self._get_single_slope_intercept( slope, intercept, cal_index) @@ -86,37 +86,37 @@ def _get_coefficients(self, cal_key, cal_index): def get_dataset(self, dataset_id, ds_info): """Load data variable and metadata and calibrate if needed.""" - file_key = ds_info.get('file_key', dataset_id['name']) - band_index = ds_info.get('band_index') + file_key = ds_info.get("file_key", dataset_id["name"]) + band_index = ds_info.get("band_index") data = self[file_key] if band_index is not None: data = data[band_index] if data.ndim >= 2: - data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) + data = data.rename({data.dims[-2]: "y", data.dims[-1]: "x"}) attrs = data.attrs.copy() # avoid contaminating other band loading attrs.update(ds_info) - if 'rows_per_scan' in self.filetype_info: - attrs.setdefault('rows_per_scan', self.filetype_info['rows_per_scan']) + if "rows_per_scan" in self.filetype_info: + attrs.setdefault("rows_per_scan", self.filetype_info["rows_per_scan"]) data = self._mask_data(data, dataset_id, attrs) - slope = attrs.pop('Slope', None) - intercept = attrs.pop('Intercept', None) - if slope is not None and dataset_id.get('calibration') != 'counts': + slope = attrs.pop("Slope", None) + intercept = attrs.pop("Intercept", None) + if slope is not None and dataset_id.get("calibration") != "counts": if band_index is not None: slope = slope[band_index] intercept = intercept[band_index] data = data * slope + intercept - if dataset_id.get('calibration') == "reflectance": - coeffs = self._get_coefficients(ds_info['calibration_key'], - ds_info['calibration_index']) + if dataset_id.get("calibration") == "reflectance": + coeffs = self._get_coefficients(ds_info["calibration_key"], + ds_info["calibration_index"]) data = coeffs[0] + coeffs[1] * data + coeffs[2] * data 
** 2 - elif dataset_id.get('calibration') == "brightness_temperature": - calibration_index = ds_info['calibration_index'] + elif dataset_id.get("calibration") == "brightness_temperature": + calibration_index = ds_info["calibration_index"] # Converts um^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. - wave_number = 1. / (dataset_id['wavelength'][1] / 1e6) + wave_number = 1. / (dataset_id["wavelength"][1] / 1e6) data = self._get_bt_dataset(data, calibration_index, wave_number) @@ -125,29 +125,29 @@ def get_dataset(self, dataset_id, ds_info): for key, val in attrs.items(): # python 3 only if bytes is not str and isinstance(val, bytes): - data.attrs[key] = val.decode('utf8') + data.attrs[key] = val.decode("utf8") data.attrs.update({ - 'platform_name': self['/attr/Satellite Name'], - 'sensor': self.sensor_name, + "platform_name": self["/attr/Satellite Name"], + "sensor": self.sensor_name, }) return data def _mask_data(self, data, dataset_id, attrs): """Mask the data using fill_value and valid_range attributes.""" - fill_value = attrs.pop('FillValue', np.nan) # covered by valid_range - valid_range = attrs.pop('valid_range', None) - if dataset_id.get('calibration') == 'counts': + fill_value = attrs.pop("FillValue", np.nan) # covered by valid_range + valid_range = attrs.pop("valid_range", None) + if dataset_id.get("calibration") == "counts": # preserve integer type of counts if possible - attrs['_FillValue'] = fill_value + attrs["_FillValue"] = fill_value new_fill = fill_value else: new_fill = np.nan if valid_range is not None: # Due to a bug in the valid_range upper limit in the 10.8(24) and 12.0(25) # in the HDF data, this is hardcoded here. - if dataset_id['name'] in ['24', '25'] and valid_range[1] == 4095: + if dataset_id["name"] in ["24", "25"] and valid_range[1] == 4095: valid_range[1] = 25000 # typically bad_values == 65535, saturated == 65534 # dead detector == 65533 @@ -184,13 +184,13 @@ def _get_bt_dataset(self, data, calibration_index, wave_number): data = data.where(data != 0) # additional corrections from the file - if self.sensor_name == 'mersi-2': - corr_coeff_a = float(self['/attr/TBB_Trans_Coefficient_A'][calibration_index]) - corr_coeff_b = float(self['/attr/TBB_Trans_Coefficient_B'][calibration_index]) - elif self.sensor_name == 'mersi-ll': + if self.sensor_name == "mersi-2": + corr_coeff_a = float(self["/attr/TBB_Trans_Coefficient_A"][calibration_index]) + corr_coeff_b = float(self["/attr/TBB_Trans_Coefficient_B"][calibration_index]) + elif self.sensor_name == "mersi-ll": # MERSI-LL stores these coefficients differently try: - coeffs = self['/attr/TBB_Trans_Coefficient'] + coeffs = self["/attr/TBB_Trans_Coefficient"] corr_coeff_a = coeffs[calibration_index] corr_coeff_b = coeffs[calibration_index + N_TOT_IR_CHANS_LL] except KeyError: diff --git a/satpy/readers/mimic_TPW2_nc.py b/satpy/readers/mimic_TPW2_nc.py index d4b7422ab1..8a22002cf4 100644 --- a/satpy/readers/mimic_TPW2_nc.py +++ b/satpy/readers/mimic_TPW2_nc.py @@ -54,8 +54,8 @@ def __init__(self, filename, filename_info, filetype_info): def available_datasets(self, configured_datasets=None): """Get datasets in file matching gelocation shape (lat/lon).""" - lat_shape = self.file_content.get('/dimension/lat') - lon_shape = self.file_content.get('/dimension/lon') + lat_shape = self.file_content.get("/dimension/lat") + lon_shape = self.file_content.get("/dimension/lon") # Read the lat/lon variables? 
handled_variables = set() @@ -67,9 +67,9 @@ def available_datasets(self, configured_datasets=None): if is_avail is not None: yield is_avail, ds_info - var_name = ds_info.get('file_key', ds_info['name']) + var_name = ds_info.get("file_key", ds_info["name"]) # logger.debug("Evaluating previously configured variable: %s", var_name) - matches = self.file_type_matches(ds_info['file_type']) + matches = self.file_type_matches(ds_info["file_type"]) # we can confidently say that we can provide this dataset and can # provide more info if matches and var_name in self: @@ -98,35 +98,35 @@ def available_datasets(self, configured_datasets=None): handled_variables.add(var_name) # Create new ds_info object new_info = { - 'name': var_name, - 'file_key': var_name, - 'file_type': self.filetype_info['file_type'], + "name": var_name, + "file_key": var_name, + "file_type": self.filetype_info["file_type"], } logger.debug(var_name) yield True, new_info def get_dataset(self, ds_id, info): """Load dataset designated by the given key from file.""" - logger.debug("Getting data for: %s", ds_id['name']) - file_key = info.get('file_key', ds_id['name']) + logger.debug("Getting data for: %s", ds_id["name"]) + file_key = info.get("file_key", ds_id["name"]) data = np.flipud(self[file_key]) - data = xr.DataArray(data, dims=['y', 'x']) + data = xr.DataArray(data, dims=["y", "x"]) data.attrs = self.get_metadata(data, info) - if 'lon' in data.dims: - data.rename({'lon': 'x'}) - if 'lat' in data.dims: - data.rename({'lat': 'y'}) + if "lon" in data.dims: + data.rename({"lon": "x"}) + if "lat" in data.dims: + data.rename({"lat": "y"}) return data def get_area_def(self, dsid): """Flip data up/down and define equirectangular AreaDefintion.""" - flip_lat = np.flipud(self['latArr']) - latlon = np.meshgrid(self['lonArr'], flip_lat) + flip_lat = np.flipud(self["latArr"]) + latlon = np.meshgrid(self["lonArr"], flip_lat) - width = self['lonArr/shape'][0] - height = self['latArr/shape'][0] + width = self["lonArr/shape"][0] + height = self["latArr/shape"][0] lower_left_x = latlon[0][height-1][0] lower_left_y = latlon[1][height-1][0] @@ -136,9 +136,9 @@ def get_area_def(self, dsid): area_extent = (lower_left_x, lower_left_y, upper_right_x, upper_right_y) description = "MIMIC TPW WGS84" - area_id = 'mimic' - proj_id = 'World Geodetic System 1984' - projection = 'EPSG:4326' + area_id = "mimic" + proj_id = "World Geodetic System 1984" + projection = "EPSG:4326" area_def = AreaDefinition(area_id, description, proj_id, projection, width, height, area_extent, ) return area_def @@ -148,24 +148,24 @@ def get_metadata(self, data, info): metadata.update(data.attrs) metadata.update(info) metadata.update({ - 'platform_shortname': 'aggregated microwave', - 'sensor': 'mimic', - 'start_time': self.start_time, - 'end_time': self.end_time, + "platform_shortname": "aggregated microwave", + "sensor": "mimic", + "start_time": self.start_time, + "end_time": self.end_time, }) - metadata.update(self[info.get('file_key')].variable.attrs) + metadata.update(self[info.get("file_key")].variable.attrs) return metadata @property def start_time(self): """Start timestamp of the dataset determined from yaml.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """End timestamp of the dataset same as start_time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def sensor_name(self): diff --git a/satpy/readers/mirs.py 
b/satpy/readers/mirs.py index de02b1dc36..1ee0912b0f 100644 --- a/satpy/readers/mirs.py +++ b/satpy/readers/mirs.py @@ -64,7 +64,7 @@ } SENSOR = {"n18": amsu, "n19": amsu, - "n20": 'atms', + "n20": "atms", "np": amsu, "m1": amsu, "m2": amsu, @@ -173,11 +173,11 @@ def get_coeff_by_sfc(coeff_fn, bt_data, idx): def limb_correct_atms_bt(bt_data, surf_type_mask, coeff_fns, ds_info): """Gather data needed for limb correction.""" - idx = ds_info['channel_index'] + idx = ds_info["channel_index"] LOG.info("Starting ATMS Limb Correction...") - sea_bt = get_coeff_by_sfc(coeff_fns['sea'], bt_data, idx) - land_bt = get_coeff_by_sfc(coeff_fns['land'], bt_data, idx) + sea_bt = get_coeff_by_sfc(coeff_fns["sea"], bt_data, idx) + land_bt = get_coeff_by_sfc(coeff_fns["land"], bt_data, idx) LOG.info("Finishing limb correction") is_sea = (surf_type_mask == 0) @@ -217,8 +217,8 @@ def __init__(self, filename, filename_info, filetype_info, decode_cf=True, mask_and_scale=False, decode_coords=True, - chunks={'Field_of_view': CHUNK_SIZE, - 'Scanline': CHUNK_SIZE}) + chunks={"Field_of_view": CHUNK_SIZE, + "Scanline": CHUNK_SIZE}) # y,x is used in satpy, bands rather than channel using in xrimage self.nc = self.nc.rename_dims({"Scanline": "y", "Field_of_view": "x"}) @@ -232,13 +232,13 @@ def __init__(self, filename, filename_info, filetype_info, @property def platform_shortname(self): """Get platform shortname.""" - return self.filename_info['platform_shortname'] + return self.filename_info["platform_shortname"] @property def _get_platform_name(self): """Get platform name.""" try: - res = PLATFORMS[self.filename_info['platform_shortname'].lower()] + res = PLATFORMS[self.filename_info["platform_shortname"].lower()] except KeyError: res = "mirs" return res.lower() @@ -296,13 +296,13 @@ def force_time(self, key): @property def _get_coeff_filenames(self): """Retrieve necessary files for coefficients if needed.""" - coeff_fn = {'sea': None, 'land': None} + coeff_fn = {"sea": None, "land": None} if self.platform_name == "noaa-20": - coeff_fn['land'] = retrieve("readers/limbcoef_atmsland_noaa20.txt") - coeff_fn['sea'] = retrieve("readers/limbcoef_atmssea_noaa20.txt") - if self.platform_name == 'npp': - coeff_fn['land'] = retrieve("readers/limbcoef_atmsland_snpp.txt") - coeff_fn['sea'] = retrieve("readers/limbcoef_atmssea_snpp.txt") + coeff_fn["land"] = retrieve("readers/limbcoef_atmsland_noaa20.txt") + coeff_fn["sea"] = retrieve("readers/limbcoef_atmssea_noaa20.txt") + if self.platform_name == "npp": + coeff_fn["land"] = retrieve("readers/limbcoef_atmsland_snpp.txt") + coeff_fn["sea"] = retrieve("readers/limbcoef_atmssea_snpp.txt") return coeff_fn @@ -311,10 +311,10 @@ def update_metadata(self, ds_info): metadata = {} metadata.update(ds_info) metadata.update({ - 'sensor': self.sensor, - 'platform_name': self.platform_name, - 'start_time': self.start_time, - 'end_time': self.end_time, + "sensor": self.sensor, + "platform_name": self.platform_name, + "start_time": self.start_time, + "end_time": self.end_time, }) return metadata @@ -325,9 +325,9 @@ def _nan_for_dtype(data_arr_dtype): if data_arr_dtype.type == np.float32: return np.float32(np.nan) if np.issubdtype(data_arr_dtype, np.timedelta64): - return np.timedelta64('NaT') + return np.timedelta64("NaT") if np.issubdtype(data_arr_dtype, np.datetime64): - return np.datetime64('NaT') + return np.datetime64("NaT") return np.nan @staticmethod @@ -375,19 +375,19 @@ def apply_attributes(self, data, ds_info): ds_info.update(data.attrs) # special cases - if ds_info['name'] in 
["latitude", "longitude"]: + if ds_info["name"] in ["latitude", "longitude"]: ds_info["standard_name"] = ds_info.get("standard_name", - ds_info['name']) + ds_info["name"]) # try to assign appropriate units (if "Kelvin" covert to K) units_convert = {"Kelvin": "K"} - data_unit = ds_info.get('units', None) - ds_info['units'] = units_convert.get(data_unit, data_unit) + data_unit = ds_info.get("units", None) + ds_info["units"] = units_convert.get(data_unit, data_unit) - scale = ds_info.pop('scale_factor', 1.0) - offset = ds_info.pop('add_offset', 0.) + scale = ds_info.pop("scale_factor", 1.0) + offset = ds_info.pop("add_offset", 0.) fill_value = ds_info.pop("_FillValue", global_attr_fill) - valid_range = ds_info.pop('valid_range', None) + valid_range = ds_info.pop("valid_range", None) data = self._scale_data(data, scale, offset) data = self._fill_data(data, fill_value, scale, offset) @@ -399,14 +399,14 @@ def apply_attributes(self, data, ds_info): def get_dataset(self, ds_id, ds_info): """Get datasets.""" - if 'dependencies' in ds_info.keys(): - idx = ds_info['channel_index'] - data = self['BT'] + if "dependencies" in ds_info.keys(): + idx = ds_info["channel_index"] + data = self["BT"] data = data.rename(new_name_or_name_dict=ds_info["name"]) data, ds_info = self.apply_attributes(data, ds_info) if self.sensor.lower() == "atms" and self.limb_correction: - sfc_type_mask = self['Sfc_type'] + sfc_type_mask = self["Sfc_type"] data = limb_correct_atms_bt(data, sfc_type_mask, self._get_coeff_filenames, ds_info) @@ -416,7 +416,7 @@ def get_dataset(self, ds_id, ds_info): LOG.info("No Limb Correction applied.") data = data[:, :, idx] else: - data = self[ds_id['name']] + data = self[ds_id["name"]] data, ds_info = self.apply_attributes(data, ds_info) data.attrs = self.update_metadata(ds_info) @@ -440,24 +440,24 @@ def available_datasets(self, configured_datasets=None): continue yaml_info = {} - if self.file_type_matches(ds_info['file_type']): - handled_vars.add(ds_info['name']) + if self.file_type_matches(ds_info["file_type"]): + handled_vars.add(ds_info["name"]) yaml_info = ds_info - if ds_info['name'] == 'BT': + if ds_info["name"] == "BT": yield from self._available_btemp_datasets(yaml_info) yield True, ds_info yield from self._available_new_datasets(handled_vars) def _count_channel_repeat_number(self): """Count channel/polarization pair repetition.""" - freq = self.nc.coords.get('Freq', self.nc.get('Freq')) - polo = self.nc['Polo'] + freq = self.nc.coords.get("Freq", self.nc.get("Freq")) + polo = self.nc["Polo"] chn_total = Counter() normals = [] for idx, (f, p) in enumerate(zip(freq, polo)): normal_f = str(int(f)) - normal_p = 'v' if p == POLO_V else 'h' + normal_p = "v" if p == POLO_V else "h" chn_total[normal_f + normal_p] += 1 normals.append((idx, f, p, normal_f, normal_p)) @@ -471,7 +471,7 @@ def _available_btemp_datasets(self, yaml_info): for idx, _f, _p, normal_f, normal_p in normals: chn_cnt[normal_f + normal_p] += 1 p_count = str(chn_cnt[normal_f + normal_p] - if chn_total[normal_f + normal_p] > 1 else '') + if chn_total[normal_f + normal_p] > 1 else "") new_name = "btemp_{}{}{}".format(normal_f, normal_p, p_count) @@ -479,22 +479,22 @@ def _available_btemp_datasets(self, yaml_info): desc_bt = desc_bt.format(idx, normal_f, normal_p, p_count) ds_info = yaml_info.copy() ds_info.update({ - 'file_type': self.filetype_info['file_type'], - 'name': new_name, - 'description': desc_bt, - 'channel_index': idx, - 'frequency': "{}GHz".format(normal_f), - 'polarization': normal_p, - 'dependencies': ('BT', 
'Sfc_type'), - 'coordinates': ['longitude', 'latitude'] + "file_type": self.filetype_info["file_type"], + "name": new_name, + "description": desc_bt, + "channel_index": idx, + "frequency": "{}GHz".format(normal_f), + "polarization": normal_p, + "dependencies": ("BT", "Sfc_type"), + "coordinates": ["longitude", "latitude"] }) yield True, ds_info def _get_ds_info_for_data_arr(self, var_name): ds_info = { - 'file_type': self.filetype_info['file_type'], - 'name': var_name, - 'coordinates': ["longitude", "latitude"] + "file_type": self.filetype_info["file_type"], + "name": var_name, + "coordinates": ["longitude", "latitude"] } return ds_info @@ -524,7 +524,7 @@ def __getitem__(self, item): data = self.nc[item] # 'Freq' dimension causes issues in other processing - if 'Freq' in data.coords: - data = data.drop_vars('Freq') + if "Freq" in data.coords: + data = data.drop_vars("Freq") return data diff --git a/satpy/readers/modis_l1b.py b/satpy/readers/modis_l1b.py index 1d0e209d57..8280b30065 100644 --- a/satpy/readers/modis_l1b.py +++ b/satpy/readers/modis_l1b.py @@ -90,13 +90,13 @@ class HDFEOSBandReader(HDFEOSBaseFileReader): "H": 500} res_to_possible_variable_names = { - 1000: ['EV_250_Aggr1km_RefSB', - 'EV_500_Aggr1km_RefSB', - 'EV_1KM_RefSB', - 'EV_1KM_Emissive'], - 500: ['EV_250_Aggr500_RefSB', - 'EV_500_RefSB'], - 250: ['EV_250_RefSB'], + 1000: ["EV_250_Aggr1km_RefSB", + "EV_500_Aggr1km_RefSB", + "EV_1KM_RefSB", + "EV_1KM_Emissive"], + 500: ["EV_250_Aggr500_RefSB", + "EV_500_RefSB"], + 250: ["EV_250_RefSB"], } def __init__(self, filename, filename_info, filetype_info, mask_saturated=True, **kwargs): @@ -104,13 +104,13 @@ def __init__(self, filename, filename_info, filetype_info, mask_saturated=True, super().__init__(filename, filename_info, filetype_info, **kwargs) self._mask_saturated = mask_saturated - ds = self.metadata['INVENTORYMETADATA'][ - 'COLLECTIONDESCRIPTIONCLASS']['SHORTNAME']['VALUE'] + ds = self.metadata["INVENTORYMETADATA"][ + "COLLECTIONDESCRIPTIONCLASS"]["SHORTNAME"]["VALUE"] self.resolution = self.res[ds[-3]] def get_dataset(self, key, info): """Read data from file and return the corresponding projectables.""" - if self.resolution != key['resolution']: + if self.resolution != key["resolution"]: return var_name, band_index = self._get_band_variable_name_and_index(key["name"]) subdata = self.sd.select(var_name) @@ -118,8 +118,8 @@ def get_dataset(self, key, info): uncertainty = self.sd.select(var_name + "_Uncert_Indexes") chunks = self._chunks_for_variable(subdata) array = xr.DataArray(from_sds(subdata, chunks=chunks)[band_index, :, :], - dims=['y', 'x']).astype(np.float32) - valid_range = var_attrs['valid_range'] + dims=["y", "x"]).astype(np.float32) + valid_range = var_attrs["valid_range"] valid_min = np.float32(valid_range[0]) valid_max = np.float32(valid_range[1]) if not self._mask_saturated: @@ -219,24 +219,24 @@ def _mask_uncertain_pixels(self, array, uncertainty, band_index): return array def _calibrate_data(self, key, info, array, var_attrs, index): - if key['calibration'] == 'brightness_temperature': - projectable = calibrate_bt(array, var_attrs, index, key['name']) - info.setdefault('units', 'K') - info.setdefault('standard_name', 'toa_brightness_temperature') - elif key['calibration'] == 'reflectance': + if key["calibration"] == "brightness_temperature": + projectable = calibrate_bt(array, var_attrs, index, key["name"]) + info.setdefault("units", "K") + info.setdefault("standard_name", "toa_brightness_temperature") + elif key["calibration"] == "reflectance": 
projectable = calibrate_refl(array, var_attrs, index) - info.setdefault('units', '%') - info.setdefault('standard_name', - 'toa_bidirectional_reflectance') - elif key['calibration'] == 'radiance': + info.setdefault("units", "%") + info.setdefault("standard_name", + "toa_bidirectional_reflectance") + elif key["calibration"] == "radiance": projectable = calibrate_radiance(array, var_attrs, index) - info.setdefault('units', var_attrs.get('radiance_units')) - info.setdefault('standard_name', - 'toa_outgoing_radiance_per_unit_wavelength') - elif key['calibration'] == 'counts': + info.setdefault("units", var_attrs.get("radiance_units")) + info.setdefault("standard_name", + "toa_outgoing_radiance_per_unit_wavelength") + elif key["calibration"] == "counts": projectable = calibrate_counts(array, var_attrs, index) - info.setdefault('units', 'counts') - info.setdefault('standard_name', 'counts') # made up + info.setdefault("units", "counts") + info.setdefault("standard_name", "counts") # made up else: raise ValueError("Unknown calibration for " "key: {}".format(key)) @@ -254,7 +254,7 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): def get_dataset(self, key, info): """Get the dataset.""" - if key['name'] in HDFEOSGeoReader.DATASET_NAMES: + if key["name"] in HDFEOSGeoReader.DATASET_NAMES: return HDFEOSGeoReader.get_dataset(self, key, info) return HDFEOSBandReader.get_dataset(self, key, info) diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py index ac1522dfe9..8fdf1c69bb 100644 --- a/satpy/readers/modis_l2.py +++ b/satpy/readers/modis_l2.py @@ -113,30 +113,30 @@ def _select_hdf_dataset(self, hdf_dataset_name, byte_dimension): dataset = self.sd.select(hdf_dataset_name) dask_arr = from_sds(dataset, chunks=CHUNK_SIZE) attrs = dataset.attributes() - dims = ['y', 'x'] + dims = ["y", "x"] if byte_dimension == 0: - dims = ['i', 'y', 'x'] + dims = ["i", "y", "x"] dask_arr = dask_arr.astype(np.uint8) elif byte_dimension == 2: - dims = ['y', 'x', 'i'] + dims = ["y", "x", "i"] dask_arr = dask_arr.astype(np.uint8) dataset = xr.DataArray(dask_arr, dims=dims, attrs=attrs) - if 'i' in dataset.dims: + if "i" in dataset.dims: # Reorder dimensions for consistency - dataset = dataset.transpose('i', 'y', 'x') + dataset = dataset.transpose("i", "y", "x") return dataset def get_dataset(self, dataset_id, dataset_info): """Get DataArray for specified dataset.""" - dataset_name = dataset_id['name'] + dataset_name = dataset_id["name"] if self.is_geo_loadable_dataset(dataset_name): return HDFEOSGeoReader.get_dataset(self, dataset_id, dataset_info) - dataset_name_in_file = dataset_info['file_key'] + dataset_name_in_file = dataset_info["file_key"] if self.is_imapp_mask_byte1: - dataset_name_in_file = dataset_info.get('imapp_file_key', dataset_name_in_file) + dataset_name_in_file = dataset_info.get("imapp_file_key", dataset_name_in_file) # The dataset asked correspond to a given set of bits of the HDF EOS dataset - if 'byte' in dataset_info and 'byte_dimension' in dataset_info: + if "byte" in dataset_info and "byte_dimension" in dataset_info: dataset = self._extract_and_mask_category_dataset(dataset_id, dataset_info, dataset_name_in_file) else: # No byte manipulation required @@ -147,39 +147,39 @@ def get_dataset(self, dataset_id, dataset_info): def _extract_and_mask_category_dataset(self, dataset_id, dataset_info, var_name): # what dimension is per-byte - byte_dimension = None if self.is_imapp_mask_byte1 else dataset_info['byte_dimension'] + byte_dimension = None if self.is_imapp_mask_byte1 
else dataset_info["byte_dimension"] dataset = self._select_hdf_dataset(var_name, byte_dimension) # category products always have factor=1/offset=0 so don't apply them # also remove them so they don't screw up future satpy processing - dataset.attrs.pop('scale_factor', None) - dataset.attrs.pop('add_offset', None) + dataset.attrs.pop("scale_factor", None) + dataset.attrs.pop("add_offset", None) # Don't do this byte work if we are using the IMAPP mask_byte1 file if self.is_imapp_mask_byte1: return dataset dataset = _extract_byte_mask(dataset, - dataset_info['byte'], - dataset_info['bit_start'], - dataset_info['bit_count']) + dataset_info["byte"], + dataset_info["bit_start"], + dataset_info["bit_count"]) dataset = self._mask_with_quality_assurance_if_needed(dataset, dataset_info, dataset_id) return dataset def _mask_with_quality_assurance_if_needed(self, dataset, dataset_info, dataset_id): - if not dataset_info.get('quality_assurance', False): + if not dataset_info.get("quality_assurance", False): return dataset # Get quality assurance dataset recursively quality_assurance_dataset_id = dataset_id.from_dict( - dict(name='quality_assurance', resolution=1000) + dict(name="quality_assurance", resolution=1000) ) quality_assurance_dataset_info = { - 'name': 'quality_assurance', - 'resolution': 1000, - 'byte_dimension': 2, - 'byte': 0, - 'bit_start': 0, - 'bit_count': 1, - 'file_key': 'Quality_Assurance' + "name": "quality_assurance", + "resolution": 1000, + "byte_dimension": 2, + "byte": 0, + "bit_start": 0, + "bit_count": 1, + "file_key": "Quality_Assurance" } quality_assurance = self.get_dataset( quality_assurance_dataset_id, quality_assurance_dataset_info @@ -245,7 +245,7 @@ def _bits_strip(bit_start, bit_count, value): value : int Number from which to extract the bits - Returns + Returns: ------- int Value of the extracted bits diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 517e096db8..1131e40a96 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -49,10 +49,10 @@ logger = logging.getLogger(__name__) CHUNK_SIZE = get_legacy_chunk_size() -PLATFORMS = {'S2A': "Sentinel-2A", - 'S2B': "Sentinel-2B", - 'S2C': "Sentinel-2C", - 'S2D': "Sentinel-2D"} +PLATFORMS = {"S2A": "Sentinel-2A", + "S2B": "Sentinel-2B", + "S2C": "Sentinel-2C", + "S2D": "Sentinel-2D"} class SAFEMSIL1C(BaseFileHandler): @@ -63,23 +63,23 @@ def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_s super(SAFEMSIL1C, self).__init__(filename, filename_info, filetype_info) del mask_saturated - self._start_time = filename_info['observation_time'] - self._end_time = filename_info['observation_time'] - self._channel = filename_info['band_name'] + self._start_time = filename_info["observation_time"] + self._end_time = filename_info["observation_time"] + self._channel = filename_info["band_name"] self._tile_mda = tile_mda self._mda = mda - self.platform_name = PLATFORMS[filename_info['fmission_id']] + self.platform_name = PLATFORMS[filename_info["fmission_id"]] def get_dataset(self, key, info): """Load a dataset.""" - if self._channel != key['name']: + if self._channel != key["name"]: return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) proj = self._read_from_file(key) proj.attrs = info.copy() - proj.attrs['units'] = '%' - proj.attrs['platform_name'] = self.platform_name + proj.attrs["units"] = "%" + proj.attrs["platform_name"] = self.platform_name return proj def _read_from_file(self, key): @@ -102,7 +102,7 @@ def end_time(self): 
def get_area_def(self, dsid): """Get the area def.""" - if self._channel != dsid['name']: + if self._channel != dsid["name"]: return return self._tile_mda.get_area_def(dsid) @@ -113,11 +113,11 @@ class SAFEMSIXMLMetadata(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): """Init the reader.""" super().__init__(filename, filename_info, filetype_info) - self._start_time = filename_info['observation_time'] - self._end_time = filename_info['observation_time'] + self._start_time = filename_info["observation_time"] + self._end_time = filename_info["observation_time"] self.root = ET.parse(self.filename) - self.tile = filename_info['dtile_number'] - self.platform_name = PLATFORMS[filename_info['fmission_id']] + self.tile = filename_info["dtile_number"] + self.platform_name = PLATFORMS[filename_info["fmission_id"]] self.mask_saturated = mask_saturated import bottleneck # noqa import geotiepoints # noqa @@ -138,7 +138,7 @@ class SAFEMSIMDXML(SAFEMSIXMLMetadata): def calibrate_to_reflectances(self, data, band_name): """Calibrate *data* using the radiometric information for the metadata.""" - quantification = int(self.root.find('.//QUANTIFICATION_VALUE').text) + quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / quantification * 100 @@ -163,14 +163,14 @@ def _band_index(self, band): @cached_property def band_indices(self): """Get the band indices from the metadata.""" - spectral_info = self.root.findall('.//Spectral_Information') + spectral_info = self.root.findall(".//Spectral_Information") band_indices = {spec.attrib["physicalBand"]: int(spec.attrib["bandId"]) for spec in spectral_info} return band_indices @cached_property def band_offsets(self): """Get the band offsets from the metadata.""" - offsets = self.root.find('.//Radiometric_Offset_List') + offsets = self.root.find(".//Radiometric_Offset_List") if offsets is not None: band_offsets = {int(off.attrib["band_id"]): float(off.text) for off in offsets} else: @@ -180,7 +180,7 @@ def band_offsets(self): @cached_property def special_values(self): """Get the special values from the metadata.""" - special_values = self.root.findall('.//Special_Values') + special_values = self.root.findall(".//Special_Values") special_values_dict = {value[0].text: float(value[1].text) for value in special_values} return special_values_dict @@ -214,11 +214,11 @@ def physical_gains(self): def _fill_swath_edges(angles): """Fill gaps at edges of swath.""" - darr = xr.DataArray(angles, dims=['y', 'x']) - darr = darr.bfill('x') - darr = darr.ffill('x') - darr = darr.bfill('y') - darr = darr.ffill('y') + darr = xr.DataArray(angles, dims=["y", "x"]) + darr = darr.bfill("x") + darr = darr.ffill("x") + darr = darr.bfill("y") + darr = darr.ffill("y") angles = darr.data return angles @@ -229,12 +229,12 @@ class SAFEMSITileMDXML(SAFEMSIXMLMetadata): def __init__(self, filename, filename_info, filetype_info, mask_saturated=True): """Init the reader.""" super().__init__(filename, filename_info, filetype_info, mask_saturated) - self.geocoding = self.root.find('.//Tile_Geocoding') + self.geocoding = self.root.find(".//Tile_Geocoding") def get_area_def(self, dsid): """Get the area definition of the dataset.""" - area_extent = self._area_extent(dsid['resolution']) - cols, rows = self._shape(dsid['resolution']) + area_extent = self._area_extent(dsid["resolution"]) + cols, rows = self._shape(dsid["resolution"]) area = geometry.AreaDefinition( 
self.tile, "On-the-fly area", @@ -249,16 +249,16 @@ def get_area_def(self, dsid): def projection(self): """Get the geographic projection.""" from pyproj import CRS - epsg = self.geocoding.find('HORIZONTAL_CS_CODE').text + epsg = self.geocoding.find("HORIZONTAL_CS_CODE").text return CRS(epsg) def _area_extent(self, resolution): cols, rows = self._shape(resolution) geoposition = self.geocoding.find('Geoposition[@resolution="' + str(resolution) + '"]') - ulx = float(geoposition.find('ULX').text) - uly = float(geoposition.find('ULY').text) - xdim = float(geoposition.find('XDIM').text) - ydim = float(geoposition.find('YDIM').text) + ulx = float(geoposition.find("ULX").text) + uly = float(geoposition.find("ULY").text) + xdim = float(geoposition.find("XDIM").text) + ydim = float(geoposition.find("YDIM").text) area_extent = (ulx, uly + rows * ydim, ulx + cols * xdim, uly) return area_extent @@ -292,30 +292,30 @@ def interpolate_angles(self, angles, resolution): def _get_coarse_dataset(self, key, info): """Get the coarse dataset refered to by `key` from the XML data.""" - angles = self.root.find('.//Tile_Angles') - if key['name'] in ['solar_zenith_angle', 'solar_azimuth_angle']: + angles = self.root.find(".//Tile_Angles") + if key["name"] in ["solar_zenith_angle", "solar_azimuth_angle"]: angles = self._get_solar_angles(angles, info) - elif key['name'] in ['satellite_zenith_angle', 'satellite_azimuth_angle']: + elif key["name"] in ["satellite_zenith_angle", "satellite_azimuth_angle"]: angles = self._get_satellite_angles(angles, info) else: angles = None return angles def _get_solar_angles(self, angles, info): - angles = self._get_values_from_tag(angles, info['xml_tag']) + angles = self._get_values_from_tag(angles, info["xml_tag"]) return angles @staticmethod def _get_values_from_tag(xml_tree, xml_tag): - elts = xml_tree.findall(xml_tag + '/Values_List/VALUES') + elts = xml_tree.findall(xml_tag + "/Values_List/VALUES") return np.array([[val for val in elt.text.split()] for elt in elts], dtype=np.float64) def _get_satellite_angles(self, angles, info): arrays = [] - elts = angles.findall(info['xml_tag'] + '[@bandId="1"]') + elts = angles.findall(info["xml_tag"] + '[@bandId="1"]') for elt in elts: - arrays.append(self._get_values_from_tag(elt, info['xml_item'])) + arrays.append(self._get_values_from_tag(elt, info["xml_item"])) angles = np.nanmean(np.dstack(arrays), -1) return angles @@ -327,10 +327,10 @@ def get_dataset(self, key, info): angles = _fill_swath_edges(angles) - res = self.interpolate_angles(angles, key['resolution']) + res = self.interpolate_angles(angles, key["resolution"]) - proj = xr.DataArray(res, dims=['y', 'x']) + proj = xr.DataArray(res, dims=["y", "x"]) proj.attrs = info.copy() - proj.attrs['units'] = 'degrees' - proj.attrs['platform_name'] = self.platform_name + proj.attrs["units"] = "degrees" + proj.attrs["platform_name"] = self.platform_name return proj diff --git a/satpy/readers/msu_gsa_l1b.py b/satpy/readers/msu_gsa_l1b.py index df06239b43..c4e45aa333 100644 --- a/satpy/readers/msu_gsa_l1b.py +++ b/satpy/readers/msu_gsa_l1b.py @@ -37,7 +37,7 @@ class MSUGSAFileHandler(HDF5FileHandler): @property def start_time(self): """Time for timeslot scan start.""" - dtstr = self['/attr/timestamp_without_timezone'] + dtstr = self["/attr/timestamp_without_timezone"] return datetime.strptime(dtstr, "%Y-%m-%dT%H:%M:%S") @property @@ -47,65 +47,65 @@ def satellite_altitude(self): There is no documentation but this appears to be height above surface in meters. 
""" - return float(self['/attr/satellite_observation_point_height']) + return float(self["/attr/satellite_observation_point_height"]) @property def satellite_latitude(self): """Satellite latitude at time of scan.""" - return float(self['/attr/satellite_observation_point_latitude']) + return float(self["/attr/satellite_observation_point_latitude"]) @property def satellite_longitude(self): """Satellite longitude at time of scan.""" - return float(self['/attr/satellite_observation_point_longitude']) + return float(self["/attr/satellite_observation_point_longitude"]) @property def sensor_name(self): """Sensor name is hardcoded.""" - sensor = 'msu_gsa' + sensor = "msu_gsa" return sensor @property def platform_name(self): """Platform name is also hardcoded.""" - platform = 'Arctica-M-N1' + platform = "Arctica-M-N1" return platform @staticmethod def _apply_scale_offset(in_data): """Apply the scale and offset to data.""" - scl = in_data.attrs['scale'] - off = in_data.attrs['offset'] + scl = in_data.attrs["scale"] + off = in_data.attrs["offset"] return in_data * scl + off def get_dataset(self, dataset_id, ds_info): """Load data variable and metadata and calibrate if needed.""" - file_key = ds_info.get('file_key', dataset_id['name']) + file_key = ds_info.get("file_key", dataset_id["name"]) data = self[file_key] attrs = data.attrs.copy() # avoid contaminating other band loading attrs.update(ds_info) # The fill value also needs to be applied - fill_val = attrs.pop('fill_value') + fill_val = attrs.pop("fill_value") data = data.where(data != fill_val, np.nan) # Data has a scale and offset that we must apply data = self._apply_scale_offset(data) # Data is given as radiance values, we must convert if we want reflectance - if dataset_id.get('calibration') == "reflectance": - solconst = float(attrs.pop('F_solar_constant')) + if dataset_id.get("calibration") == "reflectance": + solconst = float(attrs.pop("F_solar_constant")) data = np.pi * data / solconst # Satpy expects reflectance values in 0-100 range data = data * 100. data.attrs = attrs data.attrs.update({ - 'platform_name': self.platform_name, - 'sensor': self.sensor_name, - 'sat_altitude': self.satellite_altitude, - 'sat_latitude': self.satellite_latitude, - 'sat_longitude': self.satellite_longitude, + "platform_name": self.platform_name, + "sensor": self.sensor_name, + "sat_altitude": self.satellite_altitude, + "sat_latitude": self.satellite_latitude, + "sat_longitude": self.satellite_longitude, }) return data diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 121e7d7d1b..9a309a0bb8 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -43,8 +43,8 @@ name in the reader as in the netCDF file. -Example -------- +Example: +-------- This is how to read FIDUCEO MVIRI FCDR data in satpy: .. 
code-block:: python @@ -143,8 +143,8 @@ sza_vis = scn[query_vis] -References ----------- +References: +----------- - `[Handbook]`_ MFG User Handbook - `[PUG]`_ FIDUCEO MVIRI FCDR Product User Guide @@ -173,16 +173,16 @@ MVIRI_FIELD_OF_VIEW = 18.0 """[Handbook] section 5.3.2.1.""" -CHANNELS = ['VIS', 'WV', 'IR'] +CHANNELS = ["VIS", "WV", "IR"] ANGLES = [ - 'solar_zenith_angle', - 'solar_azimuth_angle', - 'satellite_zenith_angle', - 'satellite_azimuth_angle' + "solar_zenith_angle", + "solar_azimuth_angle", + "satellite_zenith_angle", + "satellite_azimuth_angle" ] OTHER_REFLECTANCES = [ - 'u_independent_toa_bidirectional_reflectance', - 'u_structured_toa_bidirectional_reflectance' + "u_independent_toa_bidirectional_reflectance", + "u_structured_toa_bidirectional_reflectance" ] HIGH_RESOL = 2250 @@ -200,19 +200,19 @@ def __init__(self, coefs): def calibrate(self, counts, calibration): """Calibrate IR/WV counts to the given calibration.""" - if calibration == 'counts': + if calibration == "counts": return counts - elif calibration in ('radiance', 'brightness_temperature'): + elif calibration in ("radiance", "brightness_temperature"): return self._calibrate_rad_bt(counts, calibration) else: raise KeyError( - 'Invalid IR/WV calibration: {}'.format(calibration.name) + "Invalid IR/WV calibration: {}".format(calibration.name) ) def _calibrate_rad_bt(self, counts, calibration): """Calibrate counts to radiance or brightness temperature.""" rad = self._counts_to_radiance(counts) - if calibration == 'radiance': + if calibration == "radiance": return rad bt = self._radiance_to_brightness_temperature(rad) return bt @@ -222,7 +222,7 @@ def _counts_to_radiance(self, counts): Reference: [PUG], equations (4.1) and (4.2). """ - rad = self.coefs['a'] + self.coefs['b'] * counts + rad = self.coefs["a"] + self.coefs["b"] * counts return rad.where(rad > 0, np.float32(np.nan)) def _radiance_to_brightness_temperature(self, rad): @@ -230,7 +230,7 @@ def _radiance_to_brightness_temperature(self, rad): Reference: [PUG], equations (5.1) and (5.2). """ - bt = self.coefs['bt_b'] / (np.log(rad) - self.coefs['bt_a']) + bt = self.coefs["bt_b"] / (np.log(rad) - self.coefs["bt_a"]) return bt.where(bt > 0, np.float32(np.nan)) @@ -252,19 +252,19 @@ def __init__(self, coefs, solar_zenith_angle=None): def calibrate(self, counts, calibration): """Calibrate VIS counts.""" - if calibration == 'counts': + if calibration == "counts": return counts - elif calibration in ('radiance', 'reflectance'): + elif calibration in ("radiance", "reflectance"): return self._calibrate_rad_refl(counts, calibration) else: raise KeyError( - 'Invalid VIS calibration: {}'.format(calibration.name) + "Invalid VIS calibration: {}".format(calibration.name) ) def _calibrate_rad_refl(self, counts, calibration): """Calibrate counts to radiance or reflectance.""" rad = self._counts_to_radiance(counts) - if calibration == 'radiance': + if calibration == "radiance": return rad refl = self._radiance_to_reflectance(rad) refl = self.update_refl_attrs(refl) @@ -275,11 +275,11 @@ def _counts_to_radiance(self, counts): Reference: [PUG], equations (7) and (8). 
""" - years_since_launch = self.coefs['years_since_launch'] - a_cf = (self.coefs['a0'] + - self.coefs['a1'] * years_since_launch + - self.coefs['a2'] * years_since_launch ** 2) - mean_count_space_vis = self.coefs['mean_count_space'] + years_since_launch = self.coefs["years_since_launch"] + a_cf = (self.coefs["a0"] + + self.coefs["a1"] * years_since_launch + + self.coefs["a2"] * years_since_launch ** 2) + mean_count_space_vis = self.coefs["mean_count_space"] rad = (counts - mean_count_space_vis) * a_cf return rad.where(rad > 0, np.float32(np.nan)) @@ -298,17 +298,17 @@ def _radiance_to_reflectance(self, rad): ) # direct illumination only cos_sza = np.cos(np.deg2rad(sza)) refl = ( - (np.pi * self.coefs['distance_sun_earth'] ** 2) / - (self.coefs['solar_irradiance'] * cos_sza) * + (np.pi * self.coefs["distance_sun_earth"] ** 2) / + (self.coefs["solar_irradiance"] * cos_sza) * rad ) return self.refl_factor_to_percent(refl) def update_refl_attrs(self, refl): """Update attributes of reflectance datasets.""" - refl.attrs['sun_earth_distance_correction_applied'] = True - refl.attrs['sun_earth_distance_correction_factor'] = self.coefs[ - 'distance_sun_earth'].item() + refl.attrs["sun_earth_distance_correction_applied"] = True + refl.attrs["sun_earth_distance_correction_factor"] = self.coefs[ + "distance_sun_earth"].item() return refl @staticmethod @@ -328,24 +328,24 @@ def get_area_def(self, im_size, projection_longitude): def _get_proj_params(self, im_size, projection_longitude): """Get projection parameters for the given settings.""" - area_name = 'geos_mviri_{0}x{0}'.format(im_size) + area_name = "geos_mviri_{0}x{0}".format(im_size) lfac, cfac, loff, coff = self._get_factors_offsets(im_size) return { - 'ssp_lon': projection_longitude, - 'a': EQUATOR_RADIUS, - 'b': POLE_RADIUS, - 'h': ALTITUDE, - 'units': 'm', - 'loff': loff - im_size, - 'coff': coff, - 'lfac': -lfac, - 'cfac': -cfac, - 'nlines': im_size, - 'ncols': im_size, - 'scandir': 'S2N', # Reference: [PUG] section 2. - 'p_id': area_name, - 'a_name': area_name, - 'a_desc': 'MVIRI Geostationary Projection' + "ssp_lon": projection_longitude, + "a": EQUATOR_RADIUS, + "b": POLE_RADIUS, + "h": ALTITUDE, + "units": "m", + "loff": loff - im_size, + "coff": coff, + "lfac": -lfac, + "cfac": -cfac, + "nlines": im_size, + "ncols": im_size, + "scandir": "S2N", # Reference: [PUG] section 2. + "p_id": area_name, + "a_name": area_name, + "a_desc": "MVIRI Geostationary Projection" } def _get_factors_offsets(self, im_size): @@ -382,7 +382,7 @@ def interp_tiepoints(ds, target_x, target_y): # No tiepoint coordinates specified in the files. Use dimensions # to calculate tiepoint sampling and assign tiepoint coordinates # accordingly. 
- sampling = target_x.size // ds.coords['x'].size + sampling = target_x.size // ds.coords["x"].size ds = ds.assign_coords(x=target_x.values[::sampling], y=target_y.values[::sampling]) @@ -406,11 +406,11 @@ def interp_acq_time(time2d, target_y): Mean scanline acquisition timestamps """ # Compute mean timestamp per scanline - time = time2d.mean(dim='x') + time = time2d.mean(dim="x") # If required, repeat timestamps in y-direction to obtain higher # resolution - y = time.coords['y'].values + y = time.coords["y"].values if y.size < target_y.size: reps = target_y.size // y.size y_rep = np.repeat(y, reps) @@ -476,15 +476,15 @@ def __getitem__(self, item): def _should_dims_be_renamed(self, ds): """Determine whether dataset dimensions need to be renamed.""" - return 'y_ir_wv' in ds.dims or 'y_tie' in ds.dims + return "y_ir_wv" in ds.dims or "y_tie" in ds.dims def _rename_dims(self, ds): """Rename dataset dimensions to match satpy's expectations.""" new_names = { - 'y_ir_wv': 'y', - 'x_ir_wv': 'x', - 'y_tie': 'y', - 'x_tie': 'x' + "y_ir_wv": "y", + "x_ir_wv": "x", + "y_tie": "y", + "x_tie": "x" } for old_name, new_name in new_names.items(): if old_name in ds.dims: @@ -492,7 +492,7 @@ def _rename_dims(self, ds): return ds def _coordinates_not_assigned(self, ds): - return 'y' in ds.dims and 'y' not in ds.coords + return "y" in ds.dims and "y" not in ds.coords def _reassign_coords(self, ds): """Re-assign coordinates. @@ -500,14 +500,14 @@ def _reassign_coords(self, ds): For some reason xarray doesn't assign coordinates to all high resolution data variables. """ - return ds.assign_coords({'y': self.nc.coords['y'], - 'x': self.nc.coords['x']}) + return ds.assign_coords({"y": self.nc.coords["y"], + "x": self.nc.coords["x"]}) def _cleanup_attrs(self, ds): """Cleanup dataset attributes.""" # Remove ancillary_variables attribute to avoid downstream # satpy warnings. - ds.attrs.pop('ancillary_variables', None) + ds.attrs.pop("ancillary_variables", None) def get_time(self): """Get time coordinate. @@ -515,29 +515,29 @@ def get_time(self): Variable is sometimes named "time" and sometimes "time_ir_wv". 
""" try: - return self['time_ir_wv'] + return self["time_ir_wv"] except KeyError: - return self['time'] + return self["time"] def get_xy_coords(self, resolution): """Get x and y coordinates for the given resolution.""" if is_high_resol(resolution): - return self.nc.coords['x'], self.nc.coords['y'] - return self.nc.coords['x_ir_wv'], self.nc.coords['x_ir_wv'] + return self.nc.coords["x"], self.nc.coords["y"] + return self.nc.coords["x_ir_wv"], self.nc.coords["x_ir_wv"] def get_image_size(self, resolution): """Get image size for the given resolution.""" if is_high_resol(resolution): - return self.nc.coords['y'].size - return self.nc.coords['y_ir_wv'].size + return self.nc.coords["y"].size + return self.nc.coords["y_ir_wv"].size class FiduceoMviriBase(BaseFileHandler): """Baseclass for FIDUCEO MVIRI file handlers.""" nc_keys = { - 'WV': 'count_wv', - 'IR': 'count_ir' + "WV": "count_wv", + "IR": "count_ir" } def __init__(self, filename, filename_info, filetype_info, @@ -555,16 +555,16 @@ def __init__(self, filename, filename_info, filetype_info, self.mask_bad_quality = mask_bad_quality nc_raw = xr.open_dataset( filename, - chunks={'x': CHUNK_SIZE, - 'y': CHUNK_SIZE, - 'x_ir_wv': CHUNK_SIZE, - 'y_ir_wv': CHUNK_SIZE} + chunks={"x": CHUNK_SIZE, + "y": CHUNK_SIZE, + "x_ir_wv": CHUNK_SIZE, + "y_ir_wv": CHUNK_SIZE} ) self.nc = DatasetWrapper(nc_raw) # Projection longitude is not provided in the file, read it from the # filename. - self.projection_longitude = float(filename_info['projection_longitude']) + self.projection_longitude = float(filename_info["projection_longitude"]) self.calib_coefs = self._get_calib_coefs() self._get_angles = functools.lru_cache(maxsize=8)( @@ -576,12 +576,12 @@ def __init__(self, filename, filename_info, filetype_info, def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" - name = dataset_id['name'] - resolution = dataset_id['resolution'] + name = dataset_id["name"] + resolution = dataset_id["resolution"] if name in ANGLES: ds = self._get_angles(name, resolution) elif name in CHANNELS: - ds = self._get_channel(name, resolution, dataset_id['calibration']) + ds = self._get_channel(name, resolution, dataset_id["calibration"]) else: ds = self._get_other_dataset(name) ds = self._cleanup_coords(ds) @@ -590,7 +590,7 @@ def get_dataset(self, dataset_id, dataset_info): def get_area_def(self, dataset_id): """Get area definition of the given dataset.""" - im_size = self.nc.get_image_size(dataset_id['resolution']) + im_size = self.nc.get_image_size(dataset_id["resolution"]) nav = Navigator() return nav.get_area_def( im_size=im_size, @@ -605,13 +605,13 @@ def _get_channel(self, name, resolution, calibration): channel=name, calibration=calibration ) - if name == 'VIS': - qc = VisQualityControl(self.nc['quality_pixel_bitmask']) + if name == "VIS": + qc = VisQualityControl(self.nc["quality_pixel_bitmask"]) if self.mask_bad_quality: ds = qc.mask(ds) else: qc.check() - ds['acq_time'] = self._get_acq_time(resolution) + ds["acq_time"] = self._get_acq_time(resolution) return ds def _get_angles_uncached(self, name, resolution): @@ -638,10 +638,10 @@ def _get_other_dataset(self, name): def _update_attrs(self, ds, info): """Update dataset attributes.""" ds.attrs.update(info) - ds.attrs.update({'platform': self.filename_info['platform'], - 'sensor': self.filename_info['sensor']}) - ds.attrs['raw_metadata'] = self.nc.attrs - ds.attrs['orbital_parameters'] = self._get_orbital_parameters() + ds.attrs.update({"platform": self.filename_info["platform"], + "sensor": 
self.filename_info["sensor"]}) + ds.attrs["raw_metadata"] = self.nc.attrs + ds.attrs["orbital_parameters"] = self._get_orbital_parameters() def _cleanup_coords(self, ds): """Cleanup dataset coordinates. @@ -651,11 +651,11 @@ def _cleanup_coords(self, ds): can assign projection coordinates upstream (based on the area definition). """ - return ds.drop_vars(['y', 'x']) + return ds.drop_vars(["y", "x"]) def _calibrate(self, ds, channel, calibration): """Calibrate the given dataset.""" - if channel == 'VIS': + if channel == "VIS": return self._calibrate_vis(ds, channel, calibration) calib = IRWVCalibrator(self.calib_coefs[channel]) return calib.calibrate(ds, calibration) @@ -671,21 +671,21 @@ def _get_calib_coefs(self): Note: Only coefficients present in both file types. """ coefs = { - 'VIS': { - 'distance_sun_earth': self.nc['distance_sun_earth'], - 'solar_irradiance': self.nc['solar_irradiance_vis'] + "VIS": { + "distance_sun_earth": self.nc["distance_sun_earth"], + "solar_irradiance": self.nc["solar_irradiance_vis"] }, - 'IR': { - 'a': self.nc['a_ir'], - 'b': self.nc['b_ir'], - 'bt_a': self.nc['bt_a_ir'], - 'bt_b': self.nc['bt_b_ir'] + "IR": { + "a": self.nc["a_ir"], + "b": self.nc["b_ir"], + "bt_a": self.nc["bt_a_ir"], + "bt_b": self.nc["bt_b_ir"] }, - 'WV': { - 'a': self.nc['a_wv'], - 'b': self.nc['b_wv'], - 'bt_a': self.nc['bt_a_wv'], - 'bt_b': self.nc['bt_b_wv'] + "WV": { + "a": self.nc["a_wv"], + "b": self.nc["b_wv"], + "bt_a": self.nc["bt_a_wv"], + "bt_b": self.nc["bt_b_wv"] }, } @@ -710,15 +710,15 @@ def _get_acq_time_uncached(self, resolution): def _get_orbital_parameters(self): """Get the orbital parameters.""" orbital_parameters = { - 'projection_longitude': self.projection_longitude, - 'projection_latitude': 0.0, - 'projection_altitude': ALTITUDE + "projection_longitude": self.projection_longitude, + "projection_latitude": 0.0, + "projection_altitude": ALTITUDE } ssp_lon, ssp_lat = self._get_ssp_lonlat() if not np.isnan(ssp_lon) and not np.isnan(ssp_lat): orbital_parameters.update({ - 'satellite_actual_longitude': ssp_lon, - 'satellite_actual_latitude': ssp_lat, + "satellite_actual_longitude": ssp_lon, + "satellite_actual_latitude": ssp_lat, # altitude not available }) return orbital_parameters @@ -733,13 +733,13 @@ def _get_ssp_lonlat(self): Returns: Subsatellite longitude and latitude """ - ssp_lon = self._get_ssp('longitude') - ssp_lat = self._get_ssp('latitude') + ssp_lon = self._get_ssp("longitude") + ssp_lat = self._get_ssp("latitude") return ssp_lon, ssp_lat def _get_ssp(self, coord): - key_start = 'sub_satellite_{}_start'.format(coord) - key_end = 'sub_satellite_{}_end'.format(coord) + key_start = "sub_satellite_{}_start".format(coord) + key_end = "sub_satellite_{}_end".format(coord) try: sub_lonlat = np.nanmean( [self.nc[key_start].values, @@ -755,42 +755,42 @@ class FiduceoMviriEasyFcdrFileHandler(FiduceoMviriBase): """File handler for FIDUCEO MVIRI Easy FCDR.""" nc_keys = FiduceoMviriBase.nc_keys.copy() - nc_keys['VIS'] = 'toa_bidirectional_reflectance_vis' + nc_keys["VIS"] = "toa_bidirectional_reflectance_vis" def _calibrate_vis(self, ds, channel, calibration): """Calibrate VIS channel. Easy FCDR provides reflectance only, no counts or radiance. """ - if calibration == 'reflectance': + if calibration == "reflectance": coefs = self.calib_coefs[channel] cal = VISCalibrator(coefs) refl = cal.refl_factor_to_percent(ds) refl = cal.update_refl_attrs(refl) return refl - elif calibration in ('counts', 'radiance'): - raise ValueError('Cannot calibrate to {}. 
Easy FCDR provides ' - 'reflectance only.'.format(calibration.name)) + elif calibration in ("counts", "radiance"): + raise ValueError("Cannot calibrate to {}. Easy FCDR provides " + "reflectance only.".format(calibration.name)) else: - raise KeyError('Invalid calibration: {}'.format(calibration.name)) + raise KeyError("Invalid calibration: {}".format(calibration.name)) class FiduceoMviriFullFcdrFileHandler(FiduceoMviriBase): """File handler for FIDUCEO MVIRI Full FCDR.""" nc_keys = FiduceoMviriBase.nc_keys.copy() - nc_keys['VIS'] = 'count_vis' + nc_keys["VIS"] = "count_vis" def _get_calib_coefs(self): """Add additional VIS coefficients only present in full FCDR.""" coefs = super()._get_calib_coefs() - coefs['VIS'].update({ - 'years_since_launch': np.float32(self.nc['years_since_launch']), - 'a0': np.float32(self.nc['a0_vis']), - 'a1': np.float32(self.nc['a1_vis']), - 'a2': np.float32(self.nc['a2_vis']), - 'mean_count_space': np.float32( - self.nc['mean_count_space_vis'] + coefs["VIS"].update({ + "years_since_launch": np.float32(self.nc["years_since_launch"]), + "a0": np.float32(self.nc["a0_vis"]), + "a1": np.float32(self.nc["a1_vis"]), + "a2": np.float32(self.nc["a2_vis"]), + "mean_count_space": np.float32( + self.nc["mean_count_space_vis"] ) }) return coefs @@ -798,7 +798,7 @@ def _get_calib_coefs(self): def _calibrate_vis(self, ds, channel, calibration): """Calibrate VIS channel.""" sza = None - if calibration == 'reflectance': - sza = self._get_angles('solar_zenith_angle', HIGH_RESOL) + if calibration == "reflectance": + sza = self._get_angles("solar_zenith_angle", HIGH_RESOL) cal = VISCalibrator(self.calib_coefs[channel], sza) return cal.calibrate(ds, calibration) diff --git a/satpy/readers/mws_l1b.py b/satpy/readers/mws_l1b.py index 528675eeb5..372a59ac37 100644 --- a/satpy/readers/mws_l1b.py +++ b/satpy/readers/mws_l1b.py @@ -33,23 +33,23 @@ # parameter name and values are the paths to the variable inside the netcdf AUX_DATA = { - 'scantime_utc': 'data/navigation/mws_scantime_utc', - 'solar_azimuth': 'data/navigation/mws_solar_azimuth_angle', - 'solar_zenith': 'data/navigation/mws_solar_zenith_angle', - 'satellite_azimuth': 'data/navigation/mws_satellite_azimuth_angle', - 'satellite_zenith': 'data/navigation/mws_satellite_zenith_angle', - 'surface_type': 'data/navigation/mws_surface_type', - 'terrain_elevation': 'data/navigation/mws_terrain_elevation', - 'mws_lat': 'data/navigation/mws_lat', - 'mws_lon': 'data/navigation/mws_lon', + "scantime_utc": "data/navigation/mws_scantime_utc", + "solar_azimuth": "data/navigation/mws_solar_azimuth_angle", + "solar_zenith": "data/navigation/mws_solar_zenith_angle", + "satellite_azimuth": "data/navigation/mws_satellite_azimuth_angle", + "satellite_zenith": "data/navigation/mws_satellite_zenith_angle", + "surface_type": "data/navigation/mws_surface_type", + "terrain_elevation": "data/navigation/mws_terrain_elevation", + "mws_lat": "data/navigation/mws_lat", + "mws_lon": "data/navigation/mws_lon", } -MWS_CHANNEL_NAMES_TO_NUMBER = {'1': 1, '2': 2, '3': 3, '4': 4, - '5': 5, '6': 6, '7': 7, '8': 8, - '9': 9, '10': 10, '11': 11, '12': 12, - '13': 13, '14': 14, '15': 15, '16': 16, - '17': 17, '18': 18, '19': 19, '20': 20, - '21': 21, '22': 22, '23': 23, '24': 24} +MWS_CHANNEL_NAMES_TO_NUMBER = {"1": 1, "2": 2, "3": 3, "4": 4, + "5": 5, "6": 6, "7": 7, "8": 8, + "9": 9, "10": 10, "11": 11, "12": 12, + "13": 13, "14": 14, "15": 15, "16": 16, + "17": 17, "18": 18, "19": 19, "20": 20, + "21": 21, "22": 22, "23": 23, "24": 24} MWS_CHANNEL_NAMES = 
list(MWS_CHANNEL_NAMES_TO_NUMBER.keys()) MWS_CHANNELS = set(MWS_CHANNEL_NAMES) @@ -90,9 +90,9 @@ def __init__(self, filename, filename_info, filetype_info): filetype_info, cache_var_size=10000, cache_handle=True) - logger.debug('Reading: {}'.format(self.filename)) - logger.debug('Start: {}'.format(self.start_time)) - logger.debug('End: {}'.format(self.end_time)) + logger.debug("Reading: {}".format(self.filename)) + logger.debug("Start: {}".format(self.start_time)) + logger.debug("End: {}".format(self.end_time)) self._cache = {} @@ -101,57 +101,57 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Get start time.""" - return datetime.strptime(self['/attr/sensing_start_time_utc'], - '%Y-%m-%d %H:%M:%S.%f') + return datetime.strptime(self["/attr/sensing_start_time_utc"], + "%Y-%m-%d %H:%M:%S.%f") @property def end_time(self): """Get end time.""" - return datetime.strptime(self['/attr/sensing_end_time_utc'], - '%Y-%m-%d %H:%M:%S.%f') + return datetime.strptime(self["/attr/sensing_end_time_utc"], + "%Y-%m-%d %H:%M:%S.%f") @property def sensor(self): """Get the sensor name.""" - return self['/attr/instrument'] + return self["/attr/instrument"] @property def platform_name(self): """Get the platform name.""" - return self._platform_name_translate.get(self['/attr/spacecraft']) + return self._platform_name_translate.get(self["/attr/spacecraft"]) @property def sub_satellite_longitude_start(self): """Get the longitude of sub-satellite point at start of the product.""" - return self['status/satellite/subsat_longitude_start'].data.item() + return self["status/satellite/subsat_longitude_start"].data.item() @property def sub_satellite_latitude_start(self): """Get the latitude of sub-satellite point at start of the product.""" - return self['status/satellite/subsat_latitude_start'].data.item() + return self["status/satellite/subsat_latitude_start"].data.item() @property def sub_satellite_longitude_end(self): """Get the longitude of sub-satellite point at end of the product.""" - return self['status/satellite/subsat_longitude_end'].data.item() + return self["status/satellite/subsat_longitude_end"].data.item() @property def sub_satellite_latitude_end(self): """Get the latitude of sub-satellite point at end of the product.""" - return self['status/satellite/subsat_latitude_end'].data.item() + return self["status/satellite/subsat_latitude_end"].data.item() def get_dataset(self, dataset_id, dataset_info): """Get dataset using file_key in dataset_info.""" - logger.debug('Reading {} from {}'.format(dataset_id['name'], self.filename)) + logger.debug("Reading {} from {}".format(dataset_id["name"], self.filename)) - var_key = dataset_info['file_key'] - if _get_aux_data_name_from_dsname(dataset_id['name']) is not None: - variable = self._get_dataset_aux_data(dataset_id['name']) - elif any(lb in dataset_id['name'] for lb in MWS_CHANNELS): - logger.debug(f'Reading in file to get dataset with key {var_key}.') + var_key = dataset_info["file_key"] + if _get_aux_data_name_from_dsname(dataset_id["name"]) is not None: + variable = self._get_dataset_aux_data(dataset_id["name"]) + elif any(lb in dataset_id["name"] for lb in MWS_CHANNELS): + logger.debug(f"Reading in file to get dataset with key {var_key}.") variable = self._get_dataset_channel(dataset_id, dataset_info) else: - logger.warning(f'Could not find key {var_key} in NetCDF file, no valid Dataset created') # noqa: E501 + logger.warning(f"Could not find key {var_key} in NetCDF file, no valid Dataset created") # noqa: E501 
return None variable = self._manage_attributes(variable, dataset_info) @@ -162,10 +162,10 @@ def get_dataset(self, dataset_id, dataset_info): @staticmethod def _standardize_dims(variable): """Standardize dims to y, x.""" - if 'n_scans' in variable.dims: - variable = variable.rename({'n_fovs': 'x', 'n_scans': 'y'}) - if variable.dims[0] == 'x': - variable = variable.transpose('y', 'x') + if "n_scans" in variable.dims: + variable = variable.rename({"n_fovs": "x", "n_scans": "y"}) + if variable.dims[0] == "x": + variable = variable.transpose("y", "x") return variable @staticmethod @@ -178,7 +178,7 @@ def _drop_coords(variable): def _manage_attributes(self, variable, dataset_info): """Manage attributes of the dataset.""" - variable.attrs.setdefault('units', None) + variable.attrs.setdefault("units", None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) return variable @@ -192,8 +192,8 @@ def _get_dataset_channel(self, key, dataset_info): """ # Get the dataset # Get metadata for given dataset - grp_pth = dataset_info['file_key'] - channel_index = get_channel_index_from_name(key['name']) + grp_pth = dataset_info["file_key"] + channel_index = get_channel_index_from_name(key["name"]) data = self[grp_pth][:, :, channel_index] attrs = data.attrs.copy() @@ -203,7 +203,7 @@ def _get_dataset_channel(self, key, dataset_info): default_fillvals.get(data.dtype.str[1:], np.nan)) vr = attrs.get("valid_range", [-np.inf, np.inf]) - if key['calibration'] == "counts": + if key["calibration"] == "counts": attrs["_FillValue"] = fv nfv = fv else: @@ -212,18 +212,18 @@ def _get_dataset_channel(self, key, dataset_info): data = data.where(data <= vr[1], nfv) # Manage the attributes of the dataset - data.attrs.setdefault('units', None) + data.attrs.setdefault("units", None) data.attrs.update(dataset_info) - dataset_attrs = getattr(data, 'attrs', {}) + dataset_attrs = getattr(data, "attrs", {}) dataset_attrs.update(dataset_info) dataset_attrs.update({ "platform_name": self.platform_name, "sensor": self.sensor, - "orbital_parameters": {'sub_satellite_latitude_start': self.sub_satellite_latitude_start, - 'sub_satellite_longitude_start': self.sub_satellite_longitude_start, - 'sub_satellite_latitude_end': self.sub_satellite_latitude_end, - 'sub_satellite_longitude_end': self.sub_satellite_longitude_end}, + "orbital_parameters": {"sub_satellite_latitude_start": self.sub_satellite_latitude_start, + "sub_satellite_longitude_start": self.sub_satellite_longitude_start, + "sub_satellite_latitude_end": self.sub_satellite_latitude_end, + "sub_satellite_longitude_end": self.sub_satellite_longitude_end}, }) try: @@ -237,10 +237,10 @@ def _get_dataset_channel(self, key, dataset_info): def _get_dataset_aux_data(self, dsname): """Get the auxiliary data arrays using the index map.""" # Geolocation and navigation data: - if dsname in ['mws_lat', 'mws_lon', - 'solar_azimuth', 'solar_zenith', - 'satellite_azimuth', 'satellite_zenith', - 'surface_type', 'terrain_elevation']: + if dsname in ["mws_lat", "mws_lon", + "solar_azimuth", "solar_zenith", + "satellite_azimuth", "satellite_zenith", + "surface_type", "terrain_elevation"]: var_key = AUX_DATA.get(dsname) else: raise NotImplementedError(f"Dataset {dsname!r} not supported!") @@ -252,30 +252,30 @@ def _get_dataset_aux_data(self, dsname): raise # Scale the data: - if 'scale_factor' in variable.attrs and 'add_offset' in variable.attrs: - missing_value = variable.attrs['missing_value'] + if "scale_factor" in variable.attrs and "add_offset" in 
variable.attrs: + missing_value = variable.attrs["missing_value"] variable.data = da.where(variable.data == missing_value, np.nan, - variable.data * variable.attrs['scale_factor'] + variable.attrs['add_offset']) + variable.data * variable.attrs["scale_factor"] + variable.attrs["add_offset"]) return variable def _get_global_attributes(self): """Create a dictionary of global attributes.""" return { - 'filename': self.filename, - 'start_time': self.start_time, - 'end_time': self.end_time, - 'spacecraft_name': self.platform_name, - 'sensor': self.sensor, - 'filename_start_time': self.filename_info['start_time'], - 'filename_end_time': self.filename_info['end_time'], - 'platform_name': self.platform_name, - 'quality_group': self._get_quality_attributes(), + "filename": self.filename, + "start_time": self.start_time, + "end_time": self.end_time, + "spacecraft_name": self.platform_name, + "sensor": self.sensor, + "filename_start_time": self.filename_info["start_time"], + "filename_end_time": self.filename_info["end_time"], + "platform_name": self.platform_name, + "quality_group": self._get_quality_attributes(), } def _get_quality_attributes(self): """Get quality attributes.""" - quality_group = self['quality'] + quality_group = self["quality"] quality_dict = {} for key in quality_group: # Add the values (as Numpy array) of each variable in the group diff --git a/satpy/readers/netcdf_utils.py b/satpy/readers/netcdf_utils.py index ac98f81d8d..cb5c38d1cf 100644 --- a/satpy/readers/netcdf_utils.py +++ b/satpy/readers/netcdf_utils.py @@ -103,7 +103,7 @@ def __init__(self, filename, filename_info, filetype_info, file_handle = self._get_file_handle() except IOError: LOG.exception( - 'Failed reading file %s. Possibly corrupted file', self.filename) + "Failed reading file %s. Possibly corrupted file", self.filename) raise self._set_file_handle_auto_maskandscale(file_handle, auto_maskandscale) @@ -123,7 +123,7 @@ def __init__(self, filename, filename_info, filetype_info, file_handle.close() def _get_file_handle(self): - return netCDF4.Dataset(self.filename, 'r') + return netCDF4.Dataset(self.filename, "r") @staticmethod def _set_file_handle_auto_maskandscale(file_handle, auto_maskandscale): @@ -132,8 +132,8 @@ def _set_file_handle_auto_maskandscale(file_handle, auto_maskandscale): def _set_xarray_kwargs(self, xarray_kwargs, auto_maskandscale): self._xarray_kwargs = xarray_kwargs or {} - self._xarray_kwargs.setdefault('chunks', CHUNK_SIZE) - self._xarray_kwargs.setdefault('mask_and_scale', auto_maskandscale) + self._xarray_kwargs.setdefault("chunks", CHUNK_SIZE) + self._xarray_kwargs.setdefault("mask_and_scale", auto_maskandscale) def collect_metadata(self, name, obj): """Collect all file variables and attributes for the provided file object. 
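# ---------------------------------------------------------------------------
# Illustrative aside, not part of the patch: the readers touched in this diff
# all key into satpy.readers.netcdf_utils.NetCDF4FileHandler with
# slash-separated strings ("group/variable", "/attr/<global attr>",
# "<var>/attr/<attr>", "<var>/shape"), which is why so many quoted keys change
# here. A minimal sketch of that convention; "example.nc" and the variable
# names are hypothetical, and normally a Scene builds the handler for you.
from satpy.readers.netcdf_utils import NetCDF4FileHandler

fh = NetCDF4FileHandler("example.nc", filename_info={}, filetype_info={})
instrument = fh["/attr/instrument"]                      # global attribute
sublon = fh["status/satellite/subsat_longitude_start"]   # variable in a group
units = fh.get("some_variable/attr/units")               # variable attribute
shape = fh["some_variable/shape"]                        # shape, data not read
# ---------------------------------------------------------------------------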
@@ -171,11 +171,11 @@ def _collect_variable_info(self, var_name, var_obj): def _collect_listed_variables(self, file_handle, listed_variables): variable_name_replacements = self.filetype_info.get("variable_name_replacements") for itm in self._get_required_variable_names(listed_variables, variable_name_replacements): - parts = itm.split('/') + parts = itm.split("/") grp = file_handle for p in parts[:-1]: if p == "attr": - n = '/'.join(parts) + n = "/".join(parts) self.file_content[n] = self._get_attr_value(grp, parts[-1]) break grp = grp[p] @@ -188,7 +188,7 @@ def _collect_listed_variables(self, file_handle, listed_variables): def _get_required_variable_names(listed_variables, variable_name_replacements): variable_names = [] for var in listed_variables: - if variable_name_replacements and '{' in var: + if variable_name_replacements and "{" in var: _compose_replacement_names(variable_name_replacements, var, variable_names) else: variable_names.append(var) @@ -290,7 +290,7 @@ def _get_variable(self, key, val): # these datasets are closed and inaccessible when the file is # closed, need to reopen # TODO: Handle HDF4 versus NetCDF3 versus NetCDF4 - parts = key.rsplit('/', 1) + parts = key.rsplit("/", 1) if len(parts) == 2: group, key = parts else: @@ -392,7 +392,7 @@ def _get_file_handle(self): import h5netcdf f_obj = open_file_or_filename(self.filename) self._use_h5netcdf = True - return h5netcdf.File(f_obj, 'r') + return h5netcdf.File(f_obj, "r") def __getitem__(self, key): """Get item for given key.""" diff --git a/satpy/readers/nucaps.py b/satpy/readers/nucaps.py index 806a20cece..2c9e2ba39f 100644 --- a/satpy/readers/nucaps.py +++ b/satpy/readers/nucaps.py @@ -70,10 +70,10 @@ class NUCAPSFileHandler(NetCDF4FileHandler): def __init__(self, *args, **kwargs): """Initialize file handler.""" # remove kwargs that reader instance used that file handler does not - kwargs.pop('mask_surface', None) - kwargs.pop('mask_quality', None) - kwargs.setdefault('xarray_kwargs', {}).setdefault( - 'decode_times', False) + kwargs.pop("mask_surface", None) + kwargs.pop("mask_quality", None) + kwargs.setdefault("xarray_kwargs", {}).setdefault( + "decode_times", False) super(NUCAPSFileHandler, self).__init__(*args, **kwargs) def __contains__(self, item): @@ -88,25 +88,25 @@ def _parse_datetime(self, datestr): def start_time(self): """Get start time.""" try: - return self._parse_datetime(self['/attr/time_coverage_start']) + return self._parse_datetime(self["/attr/time_coverage_start"]) except KeyError: # If attribute not present, use time from file name - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" try: - return self._parse_datetime(self['/attr/time_coverage_end']) + return self._parse_datetime(self["/attr/time_coverage_end"]) except KeyError: # If attribute not present, use time from file name - return self.filename_info['end_time'] + return self.filename_info["end_time"] @property def start_orbit_number(self): """Return orbit number for the beginning of the swath.""" try: - return int(self['/attr/start_orbit_number']) + return int(self["/attr/start_orbit_number"]) except KeyError: return 0 @@ -114,7 +114,7 @@ def start_orbit_number(self): def end_orbit_number(self): """Return orbit number for the end of the swath.""" try: - return int(self['/attr/end_orbit_number']) + return int(self["/attr/end_orbit_number"]) except KeyError: return 0 @@ -122,29 +122,29 @@ def end_orbit_number(self): def platform_name(self): """Return standard 
platform name for the file's data.""" try: - res = self['/attr/platform_name'] + res = self["/attr/platform_name"] if isinstance(res, np.ndarray): return str(res.astype(str)) return res except KeyError: - return self.filename_info['platform_shortname'] + return self.filename_info["platform_shortname"] @property def sensor_names(self): """Return standard sensor or instrument name for the file's data.""" try: - res = self['/attr/instrument_name'] - res = [x.strip() for x in res.split(',')] + res = self["/attr/instrument_name"] + res = [x.strip() for x in res.split(",")] if len(res) == 1: return res[0].lower() except KeyError: - res = ['CrIS', 'ATMS', 'VIIRS'] + res = ["CrIS", "ATMS", "VIIRS"] return set(name.lower() for name in res) def get_shape(self, ds_id, ds_info): """Return data array shape for item specified.""" - var_path = ds_info.get('file_key', '{}'.format(ds_id['name'])) - if var_path + '/shape' not in self: + var_path = ds_info.get("file_key", "{}".format(ds_id["name"])) + if var_path + "/shape" not in self: # loading a scalar value shape = 1 else: @@ -157,11 +157,11 @@ def get_shape(self, ds_id, ds_info): def get_metadata(self, dataset_id, ds_info): """Get metadata.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) shape = self.get_shape(dataset_id, ds_info) - file_units = ds_info.get('file_units', - self.get(var_path + '/attr/units')) - ds_info.update(getattr(self[var_path], 'attrs', {})) + file_units = ds_info.get("file_units", + self.get(var_path + "/attr/units")) + ds_info.update(getattr(self[var_path], "attrs", {})) # don't overwrite information in the files attrs because the same # `.attrs` is used for each separate Temperature pressure level dataset # Plus, if someone gets metadata multiple times then we are screwed @@ -176,22 +176,22 @@ def get_metadata(self, dataset_id, ds_info): "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, }) - if 'standard_name' not in info: - sname_path = var_path + '/attr/standard_name' - info['standard_name'] = self.get(sname_path) - if dataset_id['name'] != 'Quality_Flag': - anc_vars = info.get('ancillary_variables', []) - if 'Quality_Flag' not in anc_vars: - anc_vars.append('Quality_Flag') - info['ancillary_variables'] = anc_vars + if "standard_name" not in info: + sname_path = var_path + "/attr/standard_name" + info["standard_name"] = self.get(sname_path) + if dataset_id["name"] != "Quality_Flag": + anc_vars = info.get("ancillary_variables", []) + if "Quality_Flag" not in anc_vars: + anc_vars.append("Quality_Flag") + info["ancillary_variables"] = anc_vars return info def get_dataset(self, dataset_id, ds_info): """Load data array and metadata for specified dataset.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) metadata = self.get_metadata(dataset_id, ds_info) - valid_min, valid_max = self[var_path + '/attr/valid_range'] - fill_value = self.get(var_path + '/attr/_FillValue') + valid_min, valid_max = self[var_path + "/attr/valid_range"] + fill_value = self.get(var_path + "/attr/_FillValue") d_tmp = self[var_path] if "index" in ds_info: @@ -200,19 +200,19 @@ def get_dataset(self, dataset_id, ds_info): d_tmp = d_tmp[..., int(ds_info["pressure_index"])] # this is a pressure based field # include surface_pressure as metadata - sp = self['Surface_Pressure'] + sp = self["Surface_Pressure"] # Older format - if 'number_of_FORs' in sp.dims: 
- sp = sp.rename({'number_of_FORs': 'y'}) + if "number_of_FORs" in sp.dims: + sp = sp.rename({"number_of_FORs": "y"}) # Newer format - if 'Number_of_CrIS_FORs' in sp.dims: - sp = sp.rename({'Number_of_CrIS_FORs': 'y'}) - if 'surface_pressure' in ds_info: - ds_info['surface_pressure'] = xr.concat((ds_info['surface_pressure'], sp), dim='y') + if "Number_of_CrIS_FORs" in sp.dims: + sp = sp.rename({"Number_of_CrIS_FORs": "y"}) + if "surface_pressure" in ds_info: + ds_info["surface_pressure"] = xr.concat((ds_info["surface_pressure"], sp), dim="y") else: - ds_info['surface_pressure'] = sp + ds_info["surface_pressure"] = sp # include all the pressure levels - ds_info.setdefault('pressure_levels', self['Pressure'][0]) + ds_info.setdefault("pressure_levels", self["Pressure"][0]) data = d_tmp if valid_min is not None and valid_max is not None: @@ -221,16 +221,16 @@ def get_dataset(self, dataset_id, ds_info): if fill_value is not None: data = data.where(data != fill_value) # this _FillValue is no longer valid - metadata.pop('_FillValue', None) - data.attrs.pop('_FillValue', None) + metadata.pop("_FillValue", None) + data.attrs.pop("_FillValue", None) data.attrs.update(metadata) # Older format - if 'number_of_FORs' in data.dims: - data = data.rename({'number_of_FORs': 'y'}) + if "number_of_FORs" in data.dims: + data = data.rename({"number_of_FORs": "y"}) # Newer format - if 'Number_of_CrIS_FORs' in data.dims: - data = data.rename({'Number_of_CrIS_FORs': 'y'}) + if "Number_of_CrIS_FORs" in data.dims: + data = data.rename({"Number_of_CrIS_FORs": "y"}) return data @@ -248,8 +248,8 @@ def __init__(self, config_files, mask_surface=True, mask_quality=True, **kwargs) self.pressure_dataset_names = defaultdict(list) super(NUCAPSReader, self).__init__(config_files, **kwargs) - self.mask_surface = self.info.get('mask_surface', mask_surface) - self.mask_quality = self.info.get('mask_quality', mask_quality) + self.mask_surface = self.info.get("mask_surface", mask_surface) + self.mask_quality = self.info.get("mask_quality", mask_quality) def load_ds_ids_from_config(self): """Convert config dataset entries to DataIDs. @@ -263,7 +263,7 @@ def load_ds_ids_from_config(self): super(NUCAPSReader, self).load_ds_ids_from_config() for ds_id in list(self.all_ids.keys()): ds_info = self.all_ids[ds_id] - if ds_info.get('pressure_based', False): + if ds_info.get("pressure_based", False): for idx, lvl_num in enumerate(ALL_PRESSURE_LEVELS): if lvl_num < 5.0: suffix = "_{:0.03f}mb".format(lvl_num) @@ -271,14 +271,14 @@ def load_ds_ids_from_config(self): suffix = "_{:0.0f}mb".format(lvl_num) new_info = ds_info.copy() - new_info['pressure_level'] = lvl_num - new_info['pressure_index'] = idx - new_info['file_key'] = '{}'.format(ds_id['name']) - new_info['name'] = ds_id['name'] + suffix - new_ds_id = ds_id._replace(name=new_info['name']) - new_info['id'] = new_ds_id + new_info["pressure_level"] = lvl_num + new_info["pressure_index"] = idx + new_info["file_key"] = "{}".format(ds_id["name"]) + new_info["name"] = ds_id["name"] + suffix + new_ds_id = ds_id._replace(name=new_info["name"]) + new_info["id"] = new_ds_id self.all_ids[new_ds_id] = new_info - self.pressure_dataset_names[ds_id['name']].append(new_info['name']) + self.pressure_dataset_names[ds_id["name"]].append(new_info["name"]) def load(self, dataset_keys, previous_datasets=None, pressure_levels=None): """Load data from one or more set of files. 
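# ---------------------------------------------------------------------------
# Illustrative aside, not part of the patch: the keys renamed above feed
# NUCAPSReader.load_ds_ids_from_config, which expands every pressure-based
# dataset into one DataID per pressure level with a "_<level>mb" suffix.
# A standalone sketch of that naming rule; the example levels are made up,
# not the real ALL_PRESSURE_LEVELS values.
def _pressure_suffix(lvl_num):
    # Levels below 5 hPa keep three decimals, everything else is rounded.
    if lvl_num < 5.0:
        return "_{:0.03f}mb".format(lvl_num)
    return "_{:0.0f}mb".format(lvl_num)

for example_level in (2.5, 100.0, 850.0):
    print("Temperature" + _pressure_suffix(example_level))
# -> Temperature_2.500mb, Temperature_100mb, Temperature_850mb
# ---------------------------------------------------------------------------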
@@ -294,7 +294,7 @@ def load(self, dataset_keys, previous_datasets=None, pressure_levels=None): # Add pressure levels to the datasets to load if needed so # we can do further filtering after loading - plevels_ds_id = self.get_dataset_key('Pressure_Levels') + plevels_ds_id = self.get_dataset_key("Pressure_Levels") remove_plevels = False if plevels_ds_id not in dataset_keys: dataset_keys.add(plevels_ds_id) @@ -353,7 +353,7 @@ def _remove_data_at_pressure_levels(datasets_loaded, plevels_ds, pressure_levels if cond is not None: datasets_loaded[ds_id] = ds_obj.where(cond, drop=True) - datasets_loaded[ds_id].attrs['pressure_levels'] = new_plevels + datasets_loaded[ds_id].attrs["pressure_levels"] = new_plevels def _get_pressure_level_condition(plevels_ds, pressure_levels): @@ -399,8 +399,8 @@ def _mask_data_with_quality_flag(datasets_loaded, dataset_keys): for ds_id in sorted(dataset_keys): ds = datasets_loaded[ds_id] quality_flag = [ - x for x in ds.attrs.get('ancillary_variables', []) - if x.attrs.get('name') == 'Quality_Flag'] + x for x in ds.attrs.get("ancillary_variables", []) + if x.attrs.get("name") == "Quality_Flag"] if not quality_flag: continue diff --git a/satpy/readers/nwcsaf_msg2013_hdf5.py b/satpy/readers/nwcsaf_msg2013_hdf5.py index a8fdf45f3c..40a6441655 100644 --- a/satpy/readers/nwcsaf_msg2013_hdf5.py +++ b/satpy/readers/nwcsaf_msg2013_hdf5.py @@ -38,10 +38,10 @@ logger = logging.getLogger(__name__) -PLATFORM_NAMES = {'MSG1': 'Meteosat-8', - 'MSG2': 'Meteosat-9', - 'MSG3': 'Meteosat-10', - 'MSG4': 'Meteosat-11', } +PLATFORM_NAMES = {"MSG1": "Meteosat-8", + "MSG2": "Meteosat-9", + "MSG3": "Meteosat-10", + "MSG4": "Meteosat-11", } class Hdf5NWCSAF(HDF5FileHandler): @@ -56,27 +56,27 @@ def __init__(self, filename, filename_info, filetype_info): def get_dataset(self, dataset_id, ds_info): """Load a dataset.""" - file_key = ds_info.get('file_key', dataset_id['name']) + file_key = ds_info.get("file_key", dataset_id["name"]) data = self[file_key] nodata = None - if 'SCALING_FACTOR' in data.attrs and 'OFFSET' in data.attrs: + if "SCALING_FACTOR" in data.attrs and "OFFSET" in data.attrs: dtype = np.dtype(data.data) - if dataset_id['name'] in ['ctth_alti']: - data.attrs['valid_range'] = (0, 27000) - data.attrs['_FillValue'] = np.nan + if dataset_id["name"] in ["ctth_alti"]: + data.attrs["valid_range"] = (0, 27000) + data.attrs["_FillValue"] = np.nan - if dataset_id['name'] in ['ctth_alti', 'ctth_pres', 'ctth_tempe', 'ctth_effective_cloudiness']: - dtype = np.dtype('float32') + if dataset_id["name"] in ["ctth_alti", "ctth_pres", "ctth_tempe", "ctth_effective_cloudiness"]: + dtype = np.dtype("float32") nodata = 255 - if dataset_id['name'] in ['ct']: - data.attrs['valid_range'] = (0, 20) - data.attrs['_FillValue'] = 255 + if dataset_id["name"] in ["ct"]: + data.attrs["valid_range"] = (0, 20) + data.attrs["_FillValue"] = 255 # data.attrs['palette_meanings'] = list(range(21)) attrs = data.attrs - scaled_data = (data * data.attrs['SCALING_FACTOR'] + data.attrs['OFFSET']).astype(dtype) + scaled_data = (data * data.attrs["SCALING_FACTOR"] + data.attrs["OFFSET"]).astype(dtype) if nodata: scaled_data = scaled_data.where(data != nodata) scaled_data = scaled_data.where(scaled_data >= 0) @@ -92,18 +92,18 @@ def get_dataset(self, dataset_id, ds_info): def get_area_def(self, dsid): """Get the area definition of the datasets in the file.""" - if dsid['name'].endswith('_pal'): + if dsid["name"].endswith("_pal"): raise NotImplementedError - cfac = self.file_content['/attr/CFAC'] - lfac = 
self.file_content['/attr/LFAC'] - coff = self.file_content['/attr/COFF'] - loff = self.file_content['/attr/LOFF'] - numcols = int(self.file_content['/attr/NC']) - numlines = int(self.file_content['/attr/NL']) + cfac = self.file_content["/attr/CFAC"] + lfac = self.file_content["/attr/LFAC"] + coff = self.file_content["/attr/COFF"] + loff = self.file_content["/attr/LOFF"] + numcols = int(self.file_content["/attr/NC"]) + numlines = int(self.file_content["/attr/NL"]) aex = get_area_extent(cfac, lfac, coff, loff, numcols, numlines) - pname = self.file_content['/attr/PROJECTION_NAME'] + pname = self.file_content["/attr/PROJECTION_NAME"] proj = {} if pname.startswith("GEOS"): proj["proj"] = "geos" @@ -114,8 +114,8 @@ def get_area_def(self, dsid): else: raise NotImplementedError("Only geos projection supported yet.") - area_def = AreaDefinition(self.file_content['/attr/REGION_NAME'], - self.file_content['/attr/REGION_NAME'], + area_def = AreaDefinition(self.file_content["/attr/REGION_NAME"], + self.file_content["/attr/REGION_NAME"], pname, proj, numcols, @@ -127,7 +127,7 @@ def get_area_def(self, dsid): @property def start_time(self): """Return the start time of the object.""" - return datetime.strptime(self.file_content['/attr/IMAGE_ACQUISITION_TIME'], '%Y%m%d%H%M') + return datetime.strptime(self.file_content["/attr/IMAGE_ACQUISITION_TIME"], "%Y%m%d%H%M") def get_area_extent(cfac, lfac, coff, loff, numcols, numlines): diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py index 7ecc5f43f4..1b3d65cb96 100644 --- a/satpy/readers/nwcsaf_nc.py +++ b/satpy/readers/nwcsaf_nc.py @@ -42,36 +42,36 @@ CHUNK_SIZE = get_legacy_chunk_size() -SENSOR = {'NOAA-19': 'avhrr-3', - 'NOAA-18': 'avhrr-3', - 'NOAA-15': 'avhrr-3', - 'Metop-A': 'avhrr-3', - 'Metop-B': 'avhrr-3', - 'Metop-C': 'avhrr-3', - 'EOS-Aqua': 'modis', - 'EOS-Terra': 'modis', - 'Suomi-NPP': 'viirs', - 'NOAA-20': 'viirs', - 'NOAA-21': 'viirs', - 'NOAA-22': 'viirs', - 'NOAA-23': 'viirs', - 'JPSS-1': 'viirs', - 'Metop-SG-A1': 'metimage', - 'Metop-SG-A2': 'metimage', - 'Metop-SG-A3': 'metimage', - 'GOES-16': 'abi', - 'GOES-17': 'abi', - 'Himawari-8': 'ahi', - 'Himawari-9': 'ahi', +SENSOR = {"NOAA-19": "avhrr-3", + "NOAA-18": "avhrr-3", + "NOAA-15": "avhrr-3", + "Metop-A": "avhrr-3", + "Metop-B": "avhrr-3", + "Metop-C": "avhrr-3", + "EOS-Aqua": "modis", + "EOS-Terra": "modis", + "Suomi-NPP": "viirs", + "NOAA-20": "viirs", + "NOAA-21": "viirs", + "NOAA-22": "viirs", + "NOAA-23": "viirs", + "JPSS-1": "viirs", + "Metop-SG-A1": "metimage", + "Metop-SG-A2": "metimage", + "Metop-SG-A3": "metimage", + "GOES-16": "abi", + "GOES-17": "abi", + "Himawari-8": "ahi", + "Himawari-9": "ahi", } -PLATFORM_NAMES = {'MSG1': 'Meteosat-8', - 'MSG2': 'Meteosat-9', - 'MSG3': 'Meteosat-10', - 'MSG4': 'Meteosat-11', - 'GOES16': 'GOES-16', - 'GOES17': 'GOES-17', +PLATFORM_NAMES = {"MSG1": "Meteosat-8", + "MSG2": "Meteosat-9", + "MSG3": "Meteosat-10", + "MSG4": "Meteosat-11", + "GOES16": "GOES-16", + "GOES17": "GOES-17", } @@ -93,8 +93,8 @@ def __init__(self, filename, filename_info, filetype_info): mask_and_scale=False, chunks=CHUNK_SIZE) - self.nc = self.nc.rename({'nx': 'x', 'ny': 'y'}) - self.sw_version = self.nc.attrs['source'] + self.nc = self.nc.rename({"nx": "x", "ny": "y"}) + self.sw_version = self.nc.attrs["source"] self.pps = False self.platform_name = None @@ -104,12 +104,12 @@ def __init__(self, filename, filename_info, filetype_info): try: # NWCSAF/Geo: try: - kwrgs = {'sat_id': self.nc.attrs['satellite_identifier']} + kwrgs = {"sat_id": 
self.nc.attrs["satellite_identifier"]} except KeyError: - kwrgs = {'sat_id': self.nc.attrs['satellite_identifier'].astype(str)} + kwrgs = {"sat_id": self.nc.attrs["satellite_identifier"].astype(str)} except KeyError: # NWCSAF/PPS: - kwrgs = {'platform_name': self.nc.attrs['platform']} + kwrgs = {"platform_name": self.nc.attrs["platform"]} self.set_platform_and_sensor(**kwrgs) @@ -121,17 +121,17 @@ def set_platform_and_sensor(self, **kwargs): """Set some metadata: platform_name, sensors, and pps (identifying PPS or Geo).""" try: # NWCSAF/Geo - self.platform_name = PLATFORM_NAMES.get(kwargs['sat_id'], kwargs['sat_id']) + self.platform_name = PLATFORM_NAMES.get(kwargs["sat_id"], kwargs["sat_id"]) except KeyError: # NWCSAF/PPS - self.platform_name = kwargs['platform_name'] + self.platform_name = kwargs["platform_name"] self.pps = True - self.sensor = set([SENSOR.get(self.platform_name, 'seviri')]) + self.sensor = set([SENSOR.get(self.platform_name, "seviri")]) def remove_timedim(self, var): """Remove time dimension from dataset.""" - if self.pps and var.dims[0] == 'time': + if self.pps and var.dims[0] == "time": data = var[0, :, :] data.attrs = var.attrs var = data @@ -140,19 +140,19 @@ def remove_timedim(self, var): def drop_xycoords(self, variable): """Drop x, y coords when y is scan line number.""" try: - if variable.coords['y'].attrs['long_name'] == "scan line number": - return variable.drop_vars(['y', 'x']) + if variable.coords["y"].attrs["long_name"] == "scan line number": + return variable.drop_vars(["y", "x"]) except KeyError: pass return variable def get_dataset(self, dsid, info): """Load a dataset.""" - dsid_name = dsid['name'] + dsid_name = dsid["name"] if dsid_name in self.cache: - logger.debug('Get the data set from cache: %s.', dsid_name) + logger.debug("Get the data set from cache: %s.", dsid_name) return self.cache[dsid_name] - if dsid_name in ['lon', 'lat'] and dsid_name not in self.nc: + if dsid_name in ["lon", "lat"] and dsid_name not in self.nc: # Get full resolution lon,lat from the reduced (tie points) grid lon, lat = self.upsample_geolocation() if dsid_name == "lon": @@ -160,7 +160,7 @@ def get_dataset(self, dsid, info): else: return lat - logger.debug('Reading %s.', dsid_name) + logger.debug("Reading %s.", dsid_name) file_key = self._get_filekeys(dsid_name, info) variable = self.nc[file_key] variable = self.remove_timedim(variable) @@ -204,91 +204,91 @@ def scale_dataset(self, variable, info): """ variable = remove_empties(variable) - scale = variable.attrs.get('scale_factor', np.array(1)) - offset = variable.attrs.get('add_offset', np.array(0)) - if '_FillValue' in variable.attrs: - variable.attrs['scaled_FillValue'] = variable.attrs['_FillValue'] * scale + offset + scale = variable.attrs.get("scale_factor", np.array(1)) + offset = variable.attrs.get("add_offset", np.array(0)) + if "_FillValue" in variable.attrs: + variable.attrs["scaled_FillValue"] = variable.attrs["_FillValue"] * scale + offset if np.issubdtype((scale + offset).dtype, np.floating) or np.issubdtype(variable.dtype, np.floating): variable = self._mask_variable(variable) attrs = variable.attrs.copy() variable = variable * scale + offset variable.attrs = attrs - if 'valid_range' in variable.attrs: - variable.attrs['valid_range'] = variable.attrs['valid_range'] * scale + offset + if "valid_range" in variable.attrs: + variable.attrs["valid_range"] = variable.attrs["valid_range"] * scale + offset - variable.attrs.pop('add_offset', None) - variable.attrs.pop('scale_factor', None) + 
variable.attrs.pop("add_offset", None) + variable.attrs.pop("scale_factor", None) - variable.attrs.update({'platform_name': self.platform_name, - 'sensor': self.sensor}) + variable.attrs.update({"platform_name": self.platform_name, + "sensor": self.sensor}) - if not variable.attrs.get('standard_name', '').endswith('status_flag'): + if not variable.attrs.get("standard_name", "").endswith("status_flag"): # TODO: do we really need to add units to everything ? - variable.attrs.setdefault('units', '1') + variable.attrs.setdefault("units", "1") - ancillary_names = variable.attrs.get('ancillary_variables', '') + ancillary_names = variable.attrs.get("ancillary_variables", "") try: - variable.attrs['ancillary_variables'] = ancillary_names.split() + variable.attrs["ancillary_variables"] = ancillary_names.split() except AttributeError: pass - if 'palette_meanings' in variable.attrs: + if "palette_meanings" in variable.attrs: variable = self._prepare_variable_for_palette(variable, info) - if 'standard_name' in info: - variable.attrs.setdefault('standard_name', info['standard_name']) + if "standard_name" in info: + variable.attrs.setdefault("standard_name", info["standard_name"]) variable = self._adjust_variable_for_legacy_software(variable) return variable @staticmethod def _mask_variable(variable): - if '_FillValue' in variable.attrs: + if "_FillValue" in variable.attrs: variable = variable.where( - variable != variable.attrs['_FillValue']) - variable.attrs['_FillValue'] = np.nan - if 'valid_range' in variable.attrs: + variable != variable.attrs["_FillValue"]) + variable.attrs["_FillValue"] = np.nan + if "valid_range" in variable.attrs: variable = variable.where( - variable <= variable.attrs['valid_range'][1]) + variable <= variable.attrs["valid_range"][1]) variable = variable.where( - variable >= variable.attrs['valid_range'][0]) - if 'valid_max' in variable.attrs: + variable >= variable.attrs["valid_range"][0]) + if "valid_max" in variable.attrs: variable = variable.where( - variable <= variable.attrs['valid_max']) - if 'valid_min' in variable.attrs: + variable <= variable.attrs["valid_max"]) + if "valid_min" in variable.attrs: variable = variable.where( - variable >= variable.attrs['valid_min']) + variable >= variable.attrs["valid_min"]) return variable def _prepare_variable_for_palette(self, variable, info): try: - so_dataset = self.nc[self._get_varname_in_file(info, info_type='scale_offset_dataset')] + so_dataset = self.nc[self._get_varname_in_file(info, info_type="scale_offset_dataset")] except KeyError: scale = 1 offset = 0 fill_value = 255 else: - scale = so_dataset.attrs['scale_factor'] - offset = so_dataset.attrs['add_offset'] - fill_value = so_dataset.attrs['_FillValue'] - variable.attrs['palette_meanings'] = [int(val) - for val in variable.attrs['palette_meanings'].split()] - - if fill_value not in variable.attrs['palette_meanings'] and 'fill_value_color' in variable.attrs: - variable.attrs['palette_meanings'] = [fill_value] + variable.attrs['palette_meanings'] - variable = xr.DataArray(da.vstack((np.array(variable.attrs['fill_value_color']), variable.data)), + scale = so_dataset.attrs["scale_factor"] + offset = so_dataset.attrs["add_offset"] + fill_value = so_dataset.attrs["_FillValue"] + variable.attrs["palette_meanings"] = [int(val) + for val in variable.attrs["palette_meanings"].split()] + + if fill_value not in variable.attrs["palette_meanings"] and "fill_value_color" in variable.attrs: + variable.attrs["palette_meanings"] = [fill_value] + variable.attrs["palette_meanings"] + 
variable = xr.DataArray(da.vstack((np.array(variable.attrs["fill_value_color"]), variable.data)), coords=variable.coords, dims=variable.dims, attrs=variable.attrs) - val, idx = np.unique(variable.attrs['palette_meanings'], return_index=True) - variable.attrs['palette_meanings'] = val * scale + offset + val, idx = np.unique(variable.attrs["palette_meanings"], return_index=True) + variable.attrs["palette_meanings"] = val * scale + offset variable = variable[idx] return variable def _adjust_variable_for_legacy_software(self, variable): - if self.sw_version == 'NWC/PPS version v2014' and variable.attrs.get('standard_name') == 'cloud_top_altitude': + if self.sw_version == "NWC/PPS version v2014" and variable.attrs.get("standard_name") == "cloud_top_altitude": # pps 2014 valid range and palette don't match - variable.attrs['valid_range'] = (0., 9000.) - if (self.sw_version == 'NWC/PPS version v2014' and - variable.attrs.get('long_name') == 'RGB Palette for ctth_alti'): + variable.attrs["valid_range"] = (0., 9000.) + if (self.sw_version == "NWC/PPS version v2014" and + variable.attrs.get("long_name") == "RGB Palette for ctth_alti"): # pps 2014 palette has the nodata color (black) first variable = variable[1:, :] return variable @@ -298,12 +298,12 @@ def _upsample_geolocation_uncached(self): from geotiepoints import SatelliteInterpolator # Read the fields needed: - col_indices = self.nc['nx_reduced'].values - row_indices = self.nc['ny_reduced'].values - lat_reduced = self.scale_dataset(self.nc['lat_reduced'], {}) - lon_reduced = self.scale_dataset(self.nc['lon_reduced'], {}) + col_indices = self.nc["nx_reduced"].values + row_indices = self.nc["ny_reduced"].values + lat_reduced = self.scale_dataset(self.nc["lat_reduced"], {}) + lon_reduced = self.scale_dataset(self.nc["lon_reduced"], {}) - shape = (self.nc['y'].shape[0], self.nc['x'].shape[0]) + shape = (self.nc["y"].shape[0], self.nc["x"].shape[0]) cols_full = np.arange(shape[1]) rows_full = np.arange(shape[0]) @@ -313,8 +313,8 @@ def _upsample_geolocation_uncached(self): (rows_full, cols_full)) lons, lats = satint.interpolate() - lon = xr.DataArray(lons, attrs=lon_reduced.attrs, dims=['y', 'x']) - lat = xr.DataArray(lats, attrs=lat_reduced.attrs, dims=['y', 'x']) + lon = xr.DataArray(lons, attrs=lon_reduced.attrs, dims=["y", "x"]) + lat = xr.DataArray(lats, attrs=lat_reduced.attrs, dims=["y", "x"]) lat = self.drop_xycoords(lat) lon = self.drop_xycoords(lon) return lon, lat @@ -328,15 +328,15 @@ def get_area_def(self, dsid): # PPS: raise NotImplementedError - if dsid['name'].endswith('_pal'): + if dsid["name"].endswith("_pal"): raise NotImplementedError crs, area_extent = self._get_projection() crs, area_extent = self._ensure_crs_extents_in_meters(crs, area_extent) - nlines, ncols = self.nc[dsid['name']].shape - area = AreaDefinition('some_area_name', + nlines, ncols = self.nc[dsid["name"]].shape + area = AreaDefinition("some_area_name", "On-the-fly area", - 'geosmsg', + "geosmsg", crs, ncols, nlines, @@ -347,7 +347,7 @@ def get_area_def(self, dsid): @staticmethod def _ensure_crs_extents_in_meters(crs, area_extent): """Fix units in Earth shape, satellite altitude and 'units' attribute.""" - if 'kilo' in crs.axis_info[0].unit_name: + if "kilo" in crs.axis_info[0].unit_name: proj_dict = crs.to_dict() proj_dict["units"] = "m" if "a" in proj_dict: @@ -372,12 +372,12 @@ def __del__(self): @property def start_time(self): """Return the start time of the object.""" - return read_nwcsaf_time(self.nc.attrs['time_coverage_start']) + return 
read_nwcsaf_time(self.nc.attrs["time_coverage_start"]) @property def end_time(self): """Return the end time of the object.""" - return read_nwcsaf_time(self.nc.attrs['time_coverage_end']) + return read_nwcsaf_time(self.nc.attrs["time_coverage_end"]) @property def sensor_names(self): @@ -387,26 +387,26 @@ def sensor_names(self): def _get_projection(self): """Get projection from the NetCDF4 attributes.""" try: - proj_str = self.nc.attrs['gdal_projection'] + proj_str = self.nc.attrs["gdal_projection"] except TypeError: - proj_str = self.nc.attrs['gdal_projection'].decode() + proj_str = self.nc.attrs["gdal_projection"].decode() # Check the a/b/h units - radius_a = proj_str.split('+a=')[-1].split()[0] + radius_a = proj_str.split("+a=")[-1].split()[0] if float(radius_a) > 10e3: - units = 'm' + units = "m" scale = 1.0 else: - units = 'km' + units = "km" scale = 1e3 - if 'units' not in proj_str: - proj_str = proj_str + ' +units=' + units + if "units" not in proj_str: + proj_str = proj_str + " +units=" + units - area_extent = (float(self.nc.attrs['gdal_xgeo_up_left']) / scale, - float(self.nc.attrs['gdal_ygeo_low_right']) / scale, - float(self.nc.attrs['gdal_xgeo_low_right']) / scale, - float(self.nc.attrs['gdal_ygeo_up_left']) / scale) + area_extent = (float(self.nc.attrs["gdal_xgeo_up_left"]) / scale, + float(self.nc.attrs["gdal_ygeo_low_right"]) / scale, + float(self.nc.attrs["gdal_xgeo_low_right"]) / scale, + float(self.nc.attrs["gdal_ygeo_up_left"]) / scale) crs = CRS.from_string(proj_str) return crs, area_extent @@ -427,9 +427,9 @@ def read_nwcsaf_time(time_value): try: # MSG: try: - return datetime.strptime(time_value, '%Y-%m-%dT%H:%M:%SZ') + return datetime.strptime(time_value, "%Y-%m-%dT%H:%M:%SZ") except TypeError: # Remove this in summer 2024 (this is not needed since h5netcdf 0.14) - return datetime.strptime(time_value.astype(str), '%Y-%m-%dT%H:%M:%SZ') + return datetime.strptime(time_value.astype(str), "%Y-%m-%dT%H:%M:%SZ") except ValueError: # PPS: - return datetime.strptime(time_value, '%Y%m%dT%H%M%S%fZ') + return datetime.strptime(time_value, "%Y%m%dT%H%M%S%fZ") diff --git a/satpy/readers/oceancolorcci_l3_nc.py b/satpy/readers/oceancolorcci_l3_nc.py index b4ce1f7772..075e885b36 100644 --- a/satpy/readers/oceancolorcci_l3_nc.py +++ b/satpy/readers/oceancolorcci_l3_nc.py @@ -46,48 +46,48 @@ def _parse_datetime(datestr): @property def start_time(self): """Get the start time.""" - return self._parse_datetime(self['/attr/time_coverage_start']) + return self._parse_datetime(self["/attr/time_coverage_start"]) @property def end_time(self): """Get the end time.""" - return self._parse_datetime(self['/attr/time_coverage_end']) + return self._parse_datetime(self["/attr/time_coverage_end"]) @property def composite_period(self): """Determine composite period from filename information.""" - comp1 = self.filename_info['composite_period_1'] - comp2 = self.filename_info['composite_period_2'] - if comp2 == 'MONTHLY' and comp1 == "1M": - return 'monthly' - elif comp1 == '1D': - return 'daily' - elif comp1 == '5D': - return '5-day' - elif comp1 == '8D': - return '8-day' + comp1 = self.filename_info["composite_period_1"] + comp2 = self.filename_info["composite_period_2"] + if comp2 == "MONTHLY" and comp1 == "1M": + return "monthly" + elif comp1 == "1D": + return "daily" + elif comp1 == "5D": + return "5-day" + elif comp1 == "8D": + return "8-day" else: raise ValueError(f"Unknown data compositing period: {comp1}_{comp2}") def _update_attrs(self, dataset, dataset_info): """Update dataset attributes.""" - 
dataset.attrs.update(self[dataset_info['nc_key']].attrs) + dataset.attrs.update(self[dataset_info["nc_key"]].attrs) dataset.attrs.update(dataset_info) - dataset.attrs['sensor'] = 'merged' - dataset.attrs['composite_period'] = self.composite_period + dataset.attrs["sensor"] = "merged" + dataset.attrs["composite_period"] = self.composite_period # remove attributes from original file which don't apply anymore dataset.attrs.pop("nc_key") def get_dataset(self, dataset_id, ds_info): """Get dataset.""" - dataset = da.squeeze(self[ds_info['nc_key']]) - if '_FillValue' in dataset.attrs: - dataset.data = da.where(dataset.data == dataset.attrs['_FillValue'], np.nan, dataset.data) + dataset = da.squeeze(self[ds_info["nc_key"]]) + if "_FillValue" in dataset.attrs: + dataset.data = da.where(dataset.data == dataset.attrs["_FillValue"], np.nan, dataset.data) self._update_attrs(dataset, ds_info) - if 'lat' in dataset.dims: - dataset = dataset.rename({'lat': 'y'}) - if 'lon' in dataset.dims: - dataset = dataset.rename({'lon': 'x'}) + if "lat" in dataset.dims: + dataset = dataset.rename({"lat": "y"}) + if "lon" in dataset.dims: + dataset = dataset.rename({"lon": "x"}) return dataset def get_area_def(self, dsid): @@ -96,23 +96,23 @@ def get_area_def(self, dsid): There is no area definition in the file itself, so we have to compute it from the metadata, which specifies the area extent and pixel resolution. """ - proj_param = 'EPSG:4326' + proj_param = "EPSG:4326" - lon_res = float(self['/attr/geospatial_lon_resolution']) - lat_res = float(self['/attr/geospatial_lat_resolution']) + lon_res = float(self["/attr/geospatial_lon_resolution"]) + lat_res = float(self["/attr/geospatial_lat_resolution"]) - min_lon = self['/attr/geospatial_lon_min'] - max_lon = self['/attr/geospatial_lon_max'] - min_lat = self['/attr/geospatial_lat_min'] - max_lat = self['/attr/geospatial_lat_max'] + min_lon = self["/attr/geospatial_lon_min"] + max_lon = self["/attr/geospatial_lon_max"] + min_lat = self["/attr/geospatial_lat_min"] + max_lat = self["/attr/geospatial_lat_max"] area_extent = (min_lon, min_lat, max_lon, max_lat) lon_size = np.round((max_lon - min_lon) / lon_res).astype(int) lat_size = np.round((max_lat - min_lat) / lat_res).astype(int) - area = geometry.AreaDefinition('gridded_occci', - 'Full globe gridded area', - 'longlat', + area = geometry.AreaDefinition("gridded_occci", + "Full globe gridded area", + "longlat", proj_param, lon_size, lat_size, diff --git a/satpy/readers/olci_nc.py b/satpy/readers/olci_nc.py index 112f5455ac..84b21c3284 100644 --- a/satpy/readers/olci_nc.py +++ b/satpy/readers/olci_nc.py @@ -59,9 +59,9 @@ CHUNK_SIZE = get_legacy_chunk_size() -PLATFORM_NAMES = {'S3A': 'Sentinel-3A', - 'S3B': 'Sentinel-3B', - 'ENV': 'Environmental Satellite'} +PLATFORM_NAMES = {"S3A": "Sentinel-3A", + "S3B": "Sentinel-3B", + "ENV": "Environmental Satellite"} class BitFlags: @@ -70,16 +70,16 @@ class BitFlags: def __init__(self, value, flag_list=None): """Init the flags.""" self._value = value - flag_list = flag_list or ['INVALID', 'WATER', 'LAND', 'CLOUD', 'SNOW_ICE', - 'INLAND_WATER', 'TIDAL', 'COSMETIC', 'SUSPECT', - 'HISOLZEN', 'SATURATED', 'MEGLINT', 'HIGHGLINT', - 'WHITECAPS', 'ADJAC', 'WV_FAIL', 'PAR_FAIL', - 'AC_FAIL', 'OC4ME_FAIL', 'OCNN_FAIL', - 'Extra_1', - 'KDM_FAIL', - 'Extra_2', - 'CLOUD_AMBIGUOUS', 'CLOUD_MARGIN', 'BPAC_ON', 'WHITE_SCATT', - 'LOWRW', 'HIGHRW'] + flag_list = flag_list or ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", + "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT", + "HISOLZEN", 
"SATURATED", "MEGLINT", "HIGHGLINT", + "WHITECAPS", "ADJAC", "WV_FAIL", "PAR_FAIL", + "AC_FAIL", "OC4ME_FAIL", "OCNN_FAIL", + "Extra_1", + "KDM_FAIL", + "Extra_2", + "CLOUD_AMBIGUOUS", "CLOUD_MARGIN", "BPAC_ON", "WHITE_SCATT", + "LOWRW", "HIGHRW"] self.meaning = {f: i for i, f in enumerate(flag_list)} def __getitem__(self, item): @@ -108,11 +108,11 @@ def __init__(self, filename, filename_info, filetype_info, """Init the olci reader base.""" super().__init__(filename, filename_info, filetype_info) self._engine = engine - self._start_time = filename_info['start_time'] - self._end_time = filename_info['end_time'] + self._start_time = filename_info["start_time"] + self._end_time = filename_info["end_time"] # TODO: get metadata from the manifest file (xfdumanifest.xml) - self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] - self.sensor = 'olci' + self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]] + self.sensor = "olci" @cached_property def nc(self): @@ -124,7 +124,7 @@ def nc(self): engine=self._engine, chunks={self.cols_name: CHUNK_SIZE, self.rows_name: CHUNK_SIZE}) - return dataset.rename({self.cols_name: 'x', self.rows_name: 'y'}) + return dataset.rename({self.cols_name: "x", self.rows_name: "y"}) @property def start_time(self): @@ -138,8 +138,8 @@ def end_time(self): def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading %s.', key['name']) - variable = self.nc[key['name']] + logger.debug("Reading %s.", key["name"]) + variable = self.nc[key["name"]] return variable @@ -158,9 +158,9 @@ class NCOLCIChannelBase(NCOLCIBase): def __init__(self, filename, filename_info, filetype_info, engine=None): """Init the file handler.""" super().__init__(filename, filename_info, filetype_info, engine) - self.channel = filename_info.get('dataset_name') - self.reflectance_prefix = 'Oa' - self.reflectance_suffix = '_reflectance' + self.channel = filename_info.get("dataset_name") + self.reflectance_prefix = "Oa" + self.reflectance_suffix = "_reflectance" class NCOLCI1B(NCOLCIChannelBase): @@ -178,28 +178,28 @@ def _get_items(idx, solar_flux): def _get_solar_flux(self, band): """Get the solar flux for the band.""" - solar_flux = self.cal['solar_flux'].isel(bands=band).values - d_index = self.cal['detector_index'].fillna(0).astype(int) + solar_flux = self.cal["solar_flux"].isel(bands=band).values + d_index = self.cal["detector_index"].fillna(0).astype(int) return da.map_blocks(self._get_items, d_index.data, solar_flux=solar_flux, dtype=solar_flux.dtype) def get_dataset(self, key, info): """Load a dataset.""" - if self.channel != key['name']: + if self.channel != key["name"]: return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) - radiances = self.nc[self.channel + '_radiance'] + radiances = self.nc[self.channel + "_radiance"] - if key['calibration'] == 'reflectance': - idx = int(key['name'][2:]) - 1 + if key["calibration"] == "reflectance": + idx = int(key["name"][2:]) - 1 sflux = self._get_solar_flux(idx) radiances = radiances / sflux * np.pi * 100 - radiances.attrs['units'] = '%' + radiances.attrs["units"] = "%" - radiances.attrs['platform_name'] = self.platform_name - radiances.attrs['sensor'] = self.sensor + radiances.attrs["platform_name"] = self.platform_name + radiances.attrs["sensor"] = self.sensor radiances.attrs.update(key.to_dict()) return radiances @@ -215,20 +215,20 @@ def __init__(self, filename, filename_info, filetype_info, engine=None, unlog=Fa def get_dataset(self, key, info): """Load a dataset.""" - if 
self.channel is not None and self.channel != key['name']: + if self.channel is not None and self.channel != key["name"]: return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) if self.channel is not None and self.channel.startswith(self.reflectance_prefix): dataset = self.nc[self.channel + self.reflectance_suffix] else: - dataset = self.nc[info['nc_key']] + dataset = self.nc[info["nc_key"]] - if key['name'] == 'wqsf': - dataset.attrs['_FillValue'] = 1 - elif key['name'] == 'mask': + if key["name"] == "wqsf": + dataset.attrs["_FillValue"] = 1 + elif key["name"] == "mask": dataset = self.getbitmask(dataset, self.mask_items) - dataset.attrs['platform_name'] = self.platform_name - dataset.attrs['sensor'] = self.sensor + dataset.attrs["platform_name"] = self.platform_name + dataset.attrs["sensor"] = self.sensor dataset.attrs.update(key.to_dict()) if self.unlog: dataset = self.delog(dataset) @@ -262,8 +262,8 @@ def __init__(self, filename, filename_info, filetype_info, engine=None, **kwargs): """Init the file handler.""" super().__init__(filename, filename_info, filetype_info, engine) - self.l_step = self.nc.attrs['al_subsampling_factor'] - self.c_step = self.nc.attrs['ac_subsampling_factor'] + self.l_step = self.nc.attrs["al_subsampling_factor"] + self.c_step = self.nc.attrs["ac_subsampling_factor"] def _do_interpolate(self, data): @@ -287,7 +287,7 @@ def _do_interpolate(self, data): int_data = satint.interpolate() return [xr.DataArray(da.from_array(x, chunks=(CHUNK_SIZE, CHUNK_SIZE)), - dims=['y', 'x']) for x in int_data] + dims=["y", "x"]) for x in int_data] @property def _need_interpolation(self): @@ -297,37 +297,37 @@ def _need_interpolation(self): class NCOLCIAngles(NCOLCILowResData): """File handler for the OLCI angles.""" - datasets = {'satellite_azimuth_angle': 'OAA', - 'satellite_zenith_angle': 'OZA', - 'solar_azimuth_angle': 'SAA', - 'solar_zenith_angle': 'SZA'} + datasets = {"satellite_azimuth_angle": "OAA", + "satellite_zenith_angle": "OZA", + "solar_azimuth_angle": "SAA", + "solar_zenith_angle": "SZA"} def get_dataset(self, key, info): """Load a dataset.""" - if key['name'] not in self.datasets: + if key["name"] not in self.datasets: return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) if self._need_interpolation: - if key['name'].startswith('satellite'): + if key["name"].startswith("satellite"): azi, zen = self.satellite_angles - elif key['name'].startswith('solar'): + elif key["name"].startswith("solar"): azi, zen = self.sun_angles else: - raise NotImplementedError("Don't know how to read " + key['name']) + raise NotImplementedError("Don't know how to read " + key["name"]) - if 'zenith' in key['name']: + if "zenith" in key["name"]: values = zen - elif 'azimuth' in key['name']: + elif "azimuth" in key["name"]: values = azi else: - raise NotImplementedError("Don't know how to read " + key['name']) + raise NotImplementedError("Don't know how to read " + key["name"]) else: - values = self.nc[self.datasets[key['name']]] + values = self.nc[self.datasets[key["name"]]] - values.attrs['platform_name'] = self.platform_name - values.attrs['sensor'] = self.sensor + values.attrs["platform_name"] = self.platform_name + values.attrs["sensor"] = self.sensor values.attrs.update(key.to_dict()) return values @@ -335,16 +335,16 @@ def get_dataset(self, key, info): @cached_property def sun_angles(self): """Return the sun angles.""" - zen = self.nc[self.datasets['solar_zenith_angle']] - azi = 
self.nc[self.datasets['solar_azimuth_angle']] + zen = self.nc[self.datasets["solar_zenith_angle"]] + azi = self.nc[self.datasets["solar_azimuth_angle"]] azi, zen = self._interpolate_angles(azi, zen) return azi, zen @cached_property def satellite_angles(self): """Return the satellite angles.""" - zen = self.nc[self.datasets['satellite_zenith_angle']] - azi = self.nc[self.datasets['satellite_azimuth_angle']] + zen = self.nc[self.datasets["satellite_zenith_angle"]] + azi = self.nc[self.datasets["satellite_azimuth_angle"]] azi, zen = self._interpolate_angles(azi, zen) return azi, zen @@ -362,7 +362,7 @@ def _interpolate_angles(self, azi, zen): class NCOLCIMeteo(NCOLCILowResData): """File handler for the OLCI meteo data.""" - datasets = ['humidity', 'sea_level_pressure', 'total_columnar_water_vapour', 'total_ozone'] + datasets = ["humidity", "sea_level_pressure", "total_columnar_water_vapour", "total_ozone"] def __init__(self, filename, filename_info, filetype_info, engine=None): @@ -377,27 +377,27 @@ def __init__(self, filename, filename_info, filetype_info, def get_dataset(self, key, info): """Load a dataset.""" - if key['name'] not in self.datasets: + if key["name"] not in self.datasets: return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) - if self._need_interpolation and self.cache.get(key['name']) is None: + if self._need_interpolation and self.cache.get(key["name"]) is None: - data = self.nc[key['name']] + data = self.nc[key["name"]] values, = self._do_interpolate(data) values.attrs = data.attrs - self.cache[key['name']] = values + self.cache[key["name"]] = values - elif key['name'] in self.cache: - values = self.cache[key['name']] + elif key["name"] in self.cache: + values = self.cache[key["name"]] else: - values = self.nc[key['name']] + values = self.nc[key["name"]] - values.attrs['platform_name'] = self.platform_name - values.attrs['sensor'] = self.sensor + values.attrs["platform_name"] = self.platform_name + values.attrs["sensor"] = self.sensor values.attrs.update(key.to_dict()) return values diff --git a/satpy/readers/omps_edr.py b/satpy/readers/omps_edr.py index 9de71d4efa..5421ae2cd2 100644 --- a/satpy/readers/omps_edr.py +++ b/satpy/readers/omps_edr.py @@ -36,26 +36,26 @@ class EDRFileHandler(HDF5FileHandler): @property def start_orbit_number(self): """Get the start orbit number.""" - return self.filename_info['orbit'] + return self.filename_info["orbit"] @property def end_orbit_number(self): """Get the end orbit number.""" - return self.filename_info['orbit'] + return self.filename_info["orbit"] @property def platform_name(self): """Get the platform name.""" - return self.filename_info['platform_shortname'] + return self.filename_info["platform_shortname"] @property def sensor_name(self): """Get the sensor name.""" - return self.filename_info['instrument_shortname'] + return self.filename_info["instrument_shortname"] def get_shape(self, ds_id, ds_info): """Get the shape.""" - return self[ds_info['file_key'] + '/shape'] + return self[ds_info["file_key"] + "/shape"] def adjust_scaling_factors(self, factors, file_units, output_units): """Adjust scaling factors.""" @@ -68,20 +68,20 @@ def adjust_scaling_factors(self, factors, file_units, output_units): def get_metadata(self, dataset_id, ds_info): """Get the metadata.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) - info = getattr(self[var_path], 'attrs', {}).copy() - info.pop('DIMENSION_LIST', None) + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) 
+ info = getattr(self[var_path], "attrs", {}).copy() + info.pop("DIMENSION_LIST", None) info.update(ds_info) - file_units = ds_info.get('file_units') + file_units = ds_info.get("file_units") if file_units is None: - file_units = self.get(var_path + '/attr/units', self.get(var_path + '/attr/Units')) + file_units = self.get(var_path + "/attr/units", self.get(var_path + "/attr/Units")) if file_units is None: raise KeyError("File variable '{}' has no units attribute".format(var_path)) - if file_units == 'deg': - file_units = 'degrees' - elif file_units == 'Unitless': - file_units = '1' + if file_units == "deg": + file_units = "degrees" + elif file_units == "Unitless": + file_units = "1" info.update({ "shape": self.get_shape(dataset_id, ds_info), @@ -93,32 +93,32 @@ def get_metadata(self, dataset_id, ds_info): "end_orbit": self.end_orbit_number, }) info.update(dataset_id.to_dict()) - if 'standard_name' not in ds_info: - info['standard_name'] = self.get(var_path + '/attr/Title', dataset_id['name']) + if "standard_name" not in ds_info: + info["standard_name"] = self.get(var_path + "/attr/Title", dataset_id["name"]) return info def get_dataset(self, dataset_id, ds_info): """Get the dataset.""" - var_path = ds_info.get('file_key', '{}'.format(dataset_id['name'])) + var_path = ds_info.get("file_key", "{}".format(dataset_id["name"])) metadata = self.get_metadata(dataset_id, ds_info) - valid_min, valid_max = self.get(var_path + '/attr/valid_range', - self.get(var_path + '/attr/ValidRange', (None, None))) + valid_min, valid_max = self.get(var_path + "/attr/valid_range", + self.get(var_path + "/attr/ValidRange", (None, None))) if valid_min is None or valid_max is None: - valid_min = self.get(var_path + '/attr/valid_min', None) - valid_max = self.get(var_path + '/attr/valid_max', None) + valid_min = self.get(var_path + "/attr/valid_min", None) + valid_max = self.get(var_path + "/attr/valid_max", None) if valid_min is None or valid_max is None: raise KeyError("File variable '{}' has no valid range attribute".format(var_path)) - fill_name = var_path + '/attr/{}'.format(self._fill_name) + fill_name = var_path + "/attr/{}".format(self._fill_name) if fill_name in self: fill_value = self[fill_name] else: fill_value = None data = self[var_path] - scale_factor_path = var_path + '/attr/ScaleFactor' + scale_factor_path = var_path + "/attr/ScaleFactor" if scale_factor_path in self: scale_factor = self[scale_factor_path] - scale_offset = self[var_path + '/attr/Offset'] + scale_offset = self[var_path + "/attr/Offset"] else: scale_factor = None scale_offset = None @@ -130,14 +130,14 @@ def get_dataset(self, dataset_id, ds_info): data = data.where(data != fill_value) factors = (scale_factor, scale_offset) - factors = self.adjust_scaling_factors(factors, metadata['file_units'], ds_info.get("units")) + factors = self.adjust_scaling_factors(factors, metadata["file_units"], ds_info.get("units")) if factors[0] != 1 or factors[1] != 0: data = data * factors[0] + factors[1] data.attrs.update(metadata) - if 'DIMENSION_LIST' in data.attrs: - data.attrs.pop('DIMENSION_LIST') - dimensions = self.get_reference(var_path, 'DIMENSION_LIST') + if "DIMENSION_LIST" in data.attrs: + data.attrs.pop("DIMENSION_LIST") + dimensions = self.get_reference(var_path, "DIMENSION_LIST") for dim, coord in zip(data.dims, dimensions): data.coords[dim] = coord[0] return data diff --git a/satpy/readers/safe_sar_l2_ocn.py b/satpy/readers/safe_sar_l2_ocn.py index 267f8683f8..c5b63dd8be 100644 --- a/satpy/readers/safe_sar_l2_ocn.py +++ 
b/satpy/readers/safe_sar_l2_ocn.py @@ -45,15 +45,15 @@ def __init__(self, filename, filename_info, filetype_info): super(SAFENC, self).__init__(filename, filename_info, filetype_info) - self._start_time = filename_info['start_time'] - self._end_time = filename_info['end_time'] + self._start_time = filename_info["start_time"] + self._end_time = filename_info["end_time"] # For some SAFE packages, fstart_time differs, but start_time is the same # To avoid over writing exiting file with same start_time, a solution is to # use fstart_time - self._fstart_time = filename_info['fstart_time'] - self._fend_time = filename_info['fend_time'] + self._fstart_time = filename_info["fstart_time"] + self._fend_time = filename_info["fend_time"] - self._polarization = filename_info['polarization'] + self._polarization = filename_info["polarization"] self.lats = None self.lons = None @@ -63,19 +63,19 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(filename, decode_cf=True, mask_and_scale=False, - chunks={'owiAzSize': CHUNK_SIZE, - 'owiRaSize': CHUNK_SIZE}) - self.nc = self.nc.rename({'owiAzSize': 'y'}) - self.nc = self.nc.rename({'owiRaSize': 'x'}) + chunks={"owiAzSize": CHUNK_SIZE, + "owiRaSize": CHUNK_SIZE}) + self.nc = self.nc.rename({"owiAzSize": "y"}) + self.nc = self.nc.rename({"owiRaSize": "x"}) self.filename = filename def get_dataset(self, key, info): """Load a dataset.""" - if key['name'] in ['owiLat', 'owiLon']: + if key["name"] in ["owiLat", "owiLon"]: if self.lons is None or self.lats is None: - self.lons = self.nc['owiLon'] - self.lats = self.nc['owiLat'] - if key['name'] == 'owiLat': + self.lons = self.nc["owiLon"] + self.lats = self.nc["owiLat"] + if key["name"] == "owiLat": res = self.lats else: res = self.lons @@ -83,11 +83,11 @@ def get_dataset(self, key, info): else: res = self._get_data_channels(key, info) - if 'missionName' in self.nc.attrs: - res.attrs.update({'platform_name': self.nc.attrs['missionName']}) + if "missionName" in self.nc.attrs: + res.attrs.update({"platform_name": self.nc.attrs["missionName"]}) - res.attrs.update({'fstart_time': self._fstart_time}) - res.attrs.update({'fend_time': self._fend_time}) + res.attrs.update({"fstart_time": self._fstart_time}) + res.attrs.update({"fend_time": self._fend_time}) if not self._shape: self._shape = res.shape @@ -95,23 +95,23 @@ def get_dataset(self, key, info): return res def _get_data_channels(self, key, info): - res = self.nc[key['name']] - if key['name'] in ['owiHs', 'owiWl', 'owiDirmet']: - res = xr.DataArray(res, dims=['y', 'x', 'oswPartitions']) - elif key['name'] in ['owiNrcs', 'owiNesz', 'owiNrcsNeszCorr']: - res = xr.DataArray(res, dims=['y', 'x', 'oswPolarisation']) - elif key['name'] in ['owiPolarisationName']: - res = xr.DataArray(res, dims=['owiPolarisation']) - elif key['name'] in ['owiCalConstObsi', 'owiCalConstInci']: - res = xr.DataArray(res, dims=['owiIncSize']) - elif key['name'].startswith('owi'): - res = xr.DataArray(res, dims=['y', 'x']) + res = self.nc[key["name"]] + if key["name"] in ["owiHs", "owiWl", "owiDirmet"]: + res = xr.DataArray(res, dims=["y", "x", "oswPartitions"]) + elif key["name"] in ["owiNrcs", "owiNesz", "owiNrcsNeszCorr"]: + res = xr.DataArray(res, dims=["y", "x", "oswPolarisation"]) + elif key["name"] in ["owiPolarisationName"]: + res = xr.DataArray(res, dims=["owiPolarisation"]) + elif key["name"] in ["owiCalConstObsi", "owiCalConstInci"]: + res = xr.DataArray(res, dims=["owiIncSize"]) + elif key["name"].startswith("owi"): + res = xr.DataArray(res, 
dims=["y", "x"]) else: - res = xr.DataArray(res, dims=['y', 'x']) + res = xr.DataArray(res, dims=["y", "x"]) res.attrs.update(info) - if '_FillValue' in res.attrs: - res = res.where(res != res.attrs['_FillValue']) - res.attrs['_FillValue'] = np.nan + if "_FillValue" in res.attrs: + res = res.where(res != res.attrs["_FillValue"]) + res.attrs["_FillValue"] = np.nan return res @property diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 4b2d214187..19e5396b61 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -80,11 +80,11 @@ def _dictify(r): def _get_calibration_name(calibration): """Get the proper calibration name.""" - calibration_name = getattr(calibration, "name", calibration) or 'gamma' - if calibration_name == 'sigma_nought': - calibration_name = 'sigmaNought' - elif calibration_name == 'beta_nought': - calibration_name = 'betaNought' + calibration_name = getattr(calibration, "name", calibration) or "gamma" + if calibration_name == "sigma_nought": + calibration_name = "sigmaNought" + elif calibration_name == "beta_nought": + calibration_name = "betaNought" return calibration_name @@ -96,17 +96,17 @@ def __init__(self, filename, filename_info, filetype_info, """Init the xml filehandler.""" super(SAFEXML, self).__init__(filename, filename_info, filetype_info) - self._start_time = filename_info['start_time'] - self._end_time = filename_info['end_time'] - self._polarization = filename_info['polarization'] + self._start_time = filename_info["start_time"] + self._end_time = filename_info["end_time"] + self._polarization = filename_info["polarization"] self.root = ET.parse(self.filename) self.hdr = {} if header_file is not None: self.hdr = header_file.get_metadata() else: self.hdr = self.get_metadata() - self._image_shape = (self.hdr['product']['imageAnnotation']['imageInformation']['numberOfLines'], - self.hdr['product']['imageAnnotation']['imageInformation']['numberOfSamples']) + self._image_shape = (self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfLines"], + self.hdr["product"]["imageAnnotation"]["imageInformation"]["numberOfSamples"]) def get_metadata(self): """Convert the xml metadata to dict.""" @@ -169,7 +169,7 @@ def get_dataset(self, key, info, chunks=None): def get_calibration_constant(self): """Load the calibration constant.""" - return float(self.root.find('.//absoluteCalibrationConstant').text) + return float(self.root.find(".//absoluteCalibrationConstant").text) def _get_calibration_uncached(self, calibration, chunks=None): """Get the calibration array.""" @@ -280,9 +280,9 @@ def _assemble_azimuth_noise_blocks(self, chunks): # relying mostly on dask arrays. 
slices = self._create_dask_slices_from_blocks(chunks) populated_array = da.vstack(slices).rechunk(chunks) - populated_array = xr.DataArray(populated_array, dims=['y', 'x'], - coords={'x': np.arange(self._image_shape[1]), - 'y': np.arange(self._image_shape[0])}) + populated_array = xr.DataArray(populated_array, dims=["y", "x"], + coords={"x": np.arange(self._image_shape[1]), + "y": np.arange(self._image_shape[0])}) return populated_array def _create_dask_slices_from_blocks(self, chunks): @@ -306,7 +306,7 @@ def _create_dask_slice_from_block_line(self, current_line, chunks): def _get_array_pieces_for_current_line(self, current_line): """Get the array pieces that cover the current line.""" current_blocks = self._find_blocks_covering_line(current_line) - current_blocks.sort(key=(lambda x: x.coords['x'][0])) + current_blocks.sort(key=(lambda x: x.coords["x"][0])) next_line = self._get_next_start_line(current_blocks, current_line) current_y = np.arange(current_line, next_line) pieces = [arr.sel(y=current_y) for arr in current_blocks] @@ -316,12 +316,12 @@ def _find_blocks_covering_line(self, current_line): """Find the blocks covering a given line.""" current_blocks = [] for block in self.blocks: - if block.coords['y'][0] <= current_line <= block.coords['y'][-1]: + if block.coords["y"][0] <= current_line <= block.coords["y"][-1]: current_blocks.append(block) return current_blocks def _get_next_start_line(self, current_blocks, current_line): - next_line = min((arr.coords['y'][-1] for arr in current_blocks)) + 1 + next_line = min((arr.coords["y"][-1] for arr in current_blocks)) + 1 blocks_starting_soon = [block for block in self.blocks if current_line < block.coords["y"][0] < next_line] if blocks_starting_soon: next_start_line = min((arr.coords["y"][0] for arr in blocks_starting_soon)) @@ -330,21 +330,21 @@ def _get_next_start_line(self, current_blocks, current_line): def _get_padded_dask_pieces(self, pieces, chunks): """Get the padded pieces of a slice.""" - pieces = sorted(pieces, key=(lambda x: x.coords['x'][0])) + pieces = sorted(pieces, key=(lambda x: x.coords["x"][0])) dask_pieces = [] previous_x_end = -1 piece = pieces[0] - next_x_start = piece.coords['x'][0].item() - y_shape = len(piece.coords['y']) + next_x_start = piece.coords["x"][0].item() + y_shape = len(piece.coords["y"]) x_shape = (next_x_start - previous_x_end - 1) self._fill_dask_pieces(dask_pieces, (y_shape, x_shape), chunks) for i, piece in enumerate(pieces): dask_pieces.append(piece.data) - previous_x_end = piece.coords['x'][-1].item() + previous_x_end = piece.coords["x"][-1].item() try: - next_x_start = pieces[i + 1].coords['x'][0].item() + next_x_start = pieces[i + 1].coords["x"][0].item() except IndexError: next_x_start = self._image_shape[1] @@ -405,35 +405,35 @@ def expand(self, chunks): new_arr = (da.ones((len(y_coord), len(x_coord)), chunks=chunks) * np.interp(y_coord, self.lines, data)[:, np.newaxis]) new_arr = xr.DataArray(new_arr, - dims=['y', 'x'], - coords={'x': x_coord, - 'y': y_coord}) + dims=["y", "x"], + coords={"x": x_coord, + "y": y_coord}) return new_arr @property def first_pixel(self): - return int(self.element.find('firstRangeSample').text) + return int(self.element.find("firstRangeSample").text) @property def last_pixel(self): - return int(self.element.find('lastRangeSample').text) + return int(self.element.find("lastRangeSample").text) @property def first_line(self): - return int(self.element.find('firstAzimuthLine').text) + return int(self.element.find("firstAzimuthLine").text) @property def 
last_line(self): - return int(self.element.find('lastAzimuthLine').text) + return int(self.element.find("lastAzimuthLine").text) @property def lines(self): - lines = self.element.find('line').text.split() + lines = self.element.find("line").text.split() return np.array(lines).astype(int) @property def lut(self): - lut = self.element.find('noiseAzimuthLut').text.split() + lut = self.element.find("noiseAzimuthLut").text.split() return np.array(lut).astype(float) @@ -458,8 +458,8 @@ def _read_xml_array(self): x = [] data = [] for elt in elements: - new_x = elt.find('pixel').text.split() - y += [int(elt.find('line').text)] * len(new_x) + new_x = elt.find("pixel").text.split() + y += [int(elt.find("line").text)] * len(new_x) x += [int(val) for val in new_x] data += [float(val) for val in elt.find(self.element_tag).text.split()] @@ -488,7 +488,7 @@ def interpolate_xarray(xpoints, ypoints, values, shape, hchunks = range(0, shape[1], blocksize) token = tokenize(blocksize, xpoints, ypoints, values, shape) - name = 'interpolate-' + token + name = "interpolate-" + token spline = RectBivariateSpline(xpoints, ypoints, values.T) @@ -507,7 +507,7 @@ def interpolator(xnew, ynew): res = da.Array(dskx, name, shape=list(shape), chunks=(blocksize, blocksize), dtype=values.dtype) - return DataArray(res, dims=('y', 'x')) + return DataArray(res, dims=("y", "x")) def intp(grid_x, grid_y, interpolator): @@ -536,7 +536,7 @@ def interpolate_xarray_linear(xpoints, ypoints, values, shape, chunks=CHUNK_SIZE interpolator((0, 0)) res = da.map_blocks(intp, grid_x, grid_y, interpolator=interpolator) - return DataArray(res, dims=('y', 'x')) + return DataArray(res, dims=("y", "x")) class SAFEGRD(BaseFileHandler): @@ -552,19 +552,19 @@ def __init__(self, filename, filename_info, filetype_info, calfh, noisefh, annot super(SAFEGRD, self).__init__(filename, filename_info, filetype_info) - self._start_time = filename_info['start_time'] - self._end_time = filename_info['end_time'] + self._start_time = filename_info["start_time"] + self._end_time = filename_info["end_time"] - self._polarization = filename_info['polarization'] + self._polarization = filename_info["polarization"] - self._mission_id = filename_info['mission_id'] + self._mission_id = filename_info["mission_id"] self.calibration = calfh self.noise = noisefh self.annotation = annotationfh self.read_lock = Lock() - self.filehandle = rasterio.open(self.filename, 'r', sharing=False) + self.filehandle = rasterio.open(self.filename, "r", sharing=False) self.get_lonlatalts = functools.lru_cache(maxsize=2)( self._get_lonlatalts_uncached ) @@ -574,37 +574,37 @@ def get_dataset(self, key, info): if self._polarization != key["polarization"]: return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) - if key['name'] in ['longitude', 'latitude', 'altitude']: - logger.debug('Constructing coordinate arrays.') + if key["name"] in ["longitude", "latitude", "altitude"]: + logger.debug("Constructing coordinate arrays.") arrays = dict() - arrays['longitude'], arrays['latitude'], arrays['altitude'] = self.get_lonlatalts() + arrays["longitude"], arrays["latitude"], arrays["altitude"] = self.get_lonlatalts() - data = arrays[key['name']] + data = arrays[key["name"]] data.attrs.update(info) else: data = xr.open_dataset(self.filename, engine="rasterio", chunks={"band": 1, "y": CHUNK_SIZE, "x": CHUNK_SIZE})["band_data"].squeeze() - data = data.assign_coords(x=np.arange(len(data.coords['x'])), - y=np.arange(len(data.coords['y']))) + data = 
data.assign_coords(x=np.arange(len(data.coords["x"])), + y=np.arange(len(data.coords["y"]))) data = self._calibrate_and_denoise(data, key) data.attrs.update(info) - data.attrs.update({'platform_name': self._mission_id}) + data.attrs.update({"platform_name": self._mission_id}) - data = self._change_quantity(data, key['quantity']) + data = self._change_quantity(data, key["quantity"]) return data @staticmethod def _change_quantity(data, quantity): """Change quantity to dB if needed.""" - if quantity == 'dB': + if quantity == "dB": data.data = 10 * np.log10(data.data) - data.attrs['units'] = 'dB' + data.attrs["units"] = "dB" else: - data.attrs['units'] = '1' + data.attrs["units"] = "1" return data @@ -627,17 +627,17 @@ def _get_digital_number(self, data): def _denoise(self, dn, chunks): """Denoise the data.""" - logger.debug('Reading noise data.') + logger.debug("Reading noise data.") noise = self.noise.get_noise_correction(chunks=chunks).fillna(0) dn = dn - noise return dn def _calibrate(self, dn, chunks, key): """Calibrate the data.""" - logger.debug('Reading calibration data.') - cal = self.calibration.get_calibration(key['calibration'], chunks=chunks) + logger.debug("Reading calibration data.") + cal = self.calibration.get_calibration(key["calibration"], chunks=chunks) cal_constant = self.calibration.get_calibration_constant() - logger.debug('Calibrating.') + logger.debug("Calibrating.") data = ((dn + cal_constant) / (cal ** 2)).clip(min=0) return data @@ -661,12 +661,12 @@ def _get_lonlatalts_uncached(self): latitudes = interpolate_xarray(xpoints, ypoints, gcp_lats, band.shape) altitudes = interpolate_xarray(xpoints, ypoints, gcp_alts, band.shape) - longitudes.attrs['gcps'] = gcps - longitudes.attrs['crs'] = crs - latitudes.attrs['gcps'] = gcps - latitudes.attrs['crs'] = crs - altitudes.attrs['gcps'] = gcps - altitudes.attrs['crs'] = crs + longitudes.attrs["gcps"] = gcps + longitudes.attrs["crs"] = crs + latitudes.attrs["gcps"] = gcps + latitudes.attrs["crs"] = crs + altitudes.attrs["gcps"] = gcps + altitudes.attrs["crs"] = crs return longitudes, latitudes, altitudes diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 7add1df265..7a26ead72b 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -36,9 +36,8 @@ '{platform_name}-{sensor}-{start_time:%Y%m%d%H%M%S}-{end_time:%Y%m%d%H%M%S}.nc' -Example -------- - +Example: +-------- Here is an example how to read the data in satpy: .. code-block:: python @@ -92,9 +91,8 @@ ''AVHRR-GAC_FDR_1C_{platform}_{start_time:%Y%m%dT%H%M%SZ}_{end_time:%Y%m%dT%H%M%SZ}_{processing_mode}_{disposition_mode}_{creation_time}_{version_int:04d}.nc' -Example -------- - +Example: +-------- Here is an example how to read the data in satpy: .. 
code-block:: python @@ -196,7 +194,7 @@ class SatpyCFFileHandler(BaseFileHandler): """File handler for Satpy's CF netCDF files.""" - def __init__(self, filename, filename_info, filetype_info, numeric_name_prefix='CHANNEL_'): + def __init__(self, filename, filename_info, filetype_info, numeric_name_prefix="CHANNEL_"): """Initialize file handler.""" super().__init__(filename, filename_info, filetype_info) self.engine = None @@ -205,12 +203,12 @@ def __init__(self, filename, filename_info, filetype_info, numeric_name_prefix=' @property def start_time(self): """Get start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def sensor_names(self): @@ -239,11 +237,11 @@ def _existing_datasets(self, configured_datasets=None): def fix_modifier_attr(self, ds_info): """Fix modifiers attribute.""" # Empty modifiers are read as [], which causes problems later - if 'modifiers' in ds_info and not ds_info['modifiers']: - ds_info['modifiers'] = () + if "modifiers" in ds_info and not ds_info["modifiers"]: + ds_info["modifiers"] = () try: try: - ds_info['modifiers'] = tuple(ds_info['modifiers'].split(' ')) + ds_info["modifiers"] = tuple(ds_info["modifiers"].split(" ")) except AttributeError: pass except KeyError: @@ -252,14 +250,14 @@ def fix_modifier_attr(self, ds_info): def _assign_ds_info(self, var_name, val): """Assign ds_info.""" ds_info = dict(val.attrs) - ds_info['file_type'] = self.filetype_info['file_type'] - ds_info['name'] = ds_info['nc_store_name'] = var_name - if 'original_name' in ds_info: - ds_info['name'] = ds_info['original_name'] + ds_info["file_type"] = self.filetype_info["file_type"] + ds_info["name"] = ds_info["nc_store_name"] = var_name + if "original_name" in ds_info: + ds_info["name"] = ds_info["original_name"] elif self._numeric_name_prefix and var_name.startswith(self._numeric_name_prefix): - ds_info['name'] = var_name.replace(self._numeric_name_prefix, '') + ds_info["name"] = var_name.replace(self._numeric_name_prefix, "") try: - ds_info['wavelength'] = WavelengthRange.from_cf(ds_info['wavelength']) + ds_info["wavelength"] = WavelengthRange.from_cf(ds_info["wavelength"]) except KeyError: pass return ds_info @@ -278,15 +276,15 @@ def _coordinate_datasets(self, configured_datasets=None): nc = xr.open_dataset(self.filename, engine=self.engine) for var_name, val in nc.coords.items(): ds_info = dict(val.attrs) - ds_info['file_type'] = self.filetype_info['file_type'] - ds_info['name'] = var_name + ds_info["file_type"] = self.filetype_info["file_type"] + ds_info["name"] = var_name self.fix_modifier_attr(ds_info) yield True, ds_info def _compare_attr(self, _ds_id_dict, key, data): - if key in ['name', 'modifiers']: + if key in ["name", "modifiers"]: return True - elif key == 'wavelength': + elif key == "wavelength": return _ds_id_dict[key] == WavelengthRange.from_cf(data.attrs[key]) else: return data.attrs[key] == _ds_id_dict[key] @@ -303,15 +301,15 @@ def _dataid_attrs_equal(self, ds_id, data): def get_dataset(self, ds_id, ds_info): """Get dataset.""" - logger.debug("Getting data for: %s", ds_id['name']) + logger.debug("Getting data for: %s", ds_id["name"]) nc = xr.open_dataset(self.filename, engine=self.engine, - chunks={'y': CHUNK_SIZE, 'x': CHUNK_SIZE}) - name = ds_info.get('nc_store_name', ds_id['name']) - data = nc[ds_info.get('file_key', name)] + chunks={"y": 
CHUNK_SIZE, "x": CHUNK_SIZE}) + name = ds_info.get("nc_store_name", ds_id["name"]) + data = nc[ds_info.get("file_key", name)] if not self._dataid_attrs_equal(ds_id, data): return - if name != ds_id['name']: - data = data.rename(ds_id['name']) + if name != ds_id["name"]: + data = data.rename(ds_id["name"]) data.attrs.update(nc.attrs) # For now add global attributes to all datasets if "orbital_parameters" in data.attrs: data.attrs["orbital_parameters"] = _str2dict(data.attrs["orbital_parameters"]) diff --git a/satpy/readers/scatsat1_l2b.py b/satpy/readers/scatsat1_l2b.py index e507cdb2bc..9989bf3d86 100644 --- a/satpy/readers/scatsat1_l2b.py +++ b/satpy/readers/scatsat1_l2b.py @@ -30,38 +30,38 @@ class SCATSAT1L2BFileHandler(BaseFileHandler): def __init__(self, filename, filename_info, filetype_info): super(SCATSAT1L2BFileHandler, self).__init__(filename, filename_info, filetype_info) self.h5f = h5py.File(self.filename, "r") - h5data = self.h5f['science_data'] + h5data = self.h5f["science_data"] - self.filename_info['start_time'] = datetime.strptime(h5data.attrs['Range Beginning Date'], '%Y-%jT%H:%M:%S.%f') - self.filename_info['end_time'] = datetime.strptime(h5data.attrs['Range Ending Date'], '%Y-%jT%H:%M:%S.%f') + self.filename_info["start_time"] = datetime.strptime(h5data.attrs["Range Beginning Date"], "%Y-%jT%H:%M:%S.%f") + self.filename_info["end_time"] = datetime.strptime(h5data.attrs["Range Ending Date"], "%Y-%jT%H:%M:%S.%f") self.lons = None self.lats = None - self.wind_speed_scale = float(h5data.attrs['Wind Speed Selection Scale']) - self.wind_direction_scale = float(h5data.attrs['Wind Direction Selection Scale']) - self.latitude_scale = float(h5data.attrs['Latitude Scale']) - self.longitude_scale = float(h5data.attrs['Longitude Scale']) + self.wind_speed_scale = float(h5data.attrs["Wind Speed Selection Scale"]) + self.wind_direction_scale = float(h5data.attrs["Wind Direction Selection Scale"]) + self.latitude_scale = float(h5data.attrs["Latitude Scale"]) + self.longitude_scale = float(h5data.attrs["Longitude Scale"]) def get_dataset(self, key, info): - h5data = self.h5f['science_data'] - stdname = info.get('standard_name') + h5data = self.h5f["science_data"] + stdname = info.get("standard_name") - if stdname in ['latitude', 'longitude']: + if stdname in ["latitude", "longitude"]: if self.lons is None or self.lats is None: - self.lons = h5data['Longitude'][:]*self.longitude_scale - self.lats = h5data['Latitude'][:]*self.latitude_scale + self.lons = h5data["Longitude"][:]*self.longitude_scale + self.lats = h5data["Latitude"][:]*self.latitude_scale - if info['standard_name'] == 'longitude': + if info["standard_name"] == "longitude": return Dataset(self.lons, id=key, **info) else: return Dataset(self.lats, id=key, **info) - if stdname in ['wind_speed']: - windspeed = h5data['Wind_speed_selection'][:, :] * self.wind_speed_scale + if stdname in ["wind_speed"]: + windspeed = h5data["Wind_speed_selection"][:, :] * self.wind_speed_scale return Dataset(windspeed, id=key, **info) - if stdname in ['wind_direction']: - wind_direction = h5data['Wind_direction_selection'][:, :] * self.wind_direction_scale + if stdname in ["wind_direction"]: + wind_direction = h5data["Wind_direction_selection"][:, :] * self.wind_direction_scale return Dataset(wind_direction, id=key, **info) diff --git a/satpy/readers/scmi.py b/satpy/readers/scmi.py index f53073c751..a4b8620f8b 100644 --- a/satpy/readers/scmi.py +++ b/satpy/readers/scmi.py @@ -54,7 +54,7 @@ CHUNK_SIZE = get_legacy_chunk_size() # NetCDF doesn't 
support multi-threaded reading, trick it by opening # as one whole chunk then split it up before we do any calculations -LOAD_CHUNK_SIZE = int(os.getenv('PYTROLL_LOAD_CHUNK_SIZE', -1)) +LOAD_CHUNK_SIZE = int(os.getenv("PYTROLL_LOAD_CHUNK_SIZE", -1)) logger = logging.getLogger(__name__) @@ -69,20 +69,20 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=False, - chunks={'x': LOAD_CHUNK_SIZE, 'y': LOAD_CHUNK_SIZE}) - self.platform_name = self.nc.attrs['satellite_id'] + chunks={"x": LOAD_CHUNK_SIZE, "y": LOAD_CHUNK_SIZE}) + self.platform_name = self.nc.attrs["satellite_id"] self.sensor = self._get_sensor() - self.nlines = self.nc.dims['y'] - self.ncols = self.nc.dims['x'] + self.nlines = self.nc.dims["y"] + self.ncols = self.nc.dims["x"] self.coords = {} def _get_sensor(self): """Determine the sensor for this file.""" # sometimes Himawari-8 (or 9) data is stored in SCMI format - is_h8 = 'H8' in self.platform_name - is_h9 = 'H9' in self.platform_name + is_h8 = "H8" in self.platform_name + is_h9 = "H9" in self.platform_name is_ahi = is_h8 or is_h9 - return 'ahi' if is_ahi else 'abi' + return "ahi" if is_ahi else "abi" @property def sensor_names(self): @@ -99,9 +99,9 @@ def __getitem__(self, item): """ data = self.nc[item] attrs = data.attrs - factor = data.attrs.get('scale_factor') - offset = data.attrs.get('add_offset') - fill = data.attrs.get('_FillValue') + factor = data.attrs.get("scale_factor") + offset = data.attrs.get("add_offset") + fill = data.attrs.get("_FillValue") if fill is not None: data = data.where(data != fill) if factor is not None: @@ -114,8 +114,8 @@ def __getitem__(self, item): # handle coordinates (and recursive fun) new_coords = {} # 'time' dimension causes issues in other processing - if 'time' in data.coords: - data = data.drop_vars('time') + if "time" in data.coords: + data = data.drop_vars("time") if item in data.coords: self.coords[item] = data for coord_name in data.coords.keys(): @@ -131,60 +131,60 @@ def get_shape(self, key, info): def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading in get_dataset %s.', key['name']) - var_name = info.get('file_key', self.filetype_info.get('file_key')) + logger.debug("Reading in get_dataset %s.", key["name"]) + var_name = info.get("file_key", self.filetype_info.get("file_key")) if var_name: data = self[var_name] - elif 'Sectorized_CMI' in self.nc: - data = self['Sectorized_CMI'] - elif 'data' in self.nc: - data = self['data'] + elif "Sectorized_CMI" in self.nc: + data = self["Sectorized_CMI"] + elif "data" in self.nc: + data = self["data"] # NetCDF doesn't support multi-threaded reading, trick it by opening # as one whole chunk then split it up before we do any calculations - data = data.chunk({'x': CHUNK_SIZE, 'y': CHUNK_SIZE}) + data = data.chunk({"x": CHUNK_SIZE, "y": CHUNK_SIZE}) # convert to satpy standard units - factor = data.attrs.pop('scale_factor', 1) - offset = data.attrs.pop('add_offset', 0) - units = data.attrs.get('units', 1) + factor = data.attrs.pop("scale_factor", 1) + offset = data.attrs.pop("add_offset", 0) + units = data.attrs.get("units", 1) # the '*1' unit is some weird convention added/needed by AWIPS - if units in ['1', '*1'] and key['calibration'] == 'reflectance': + if units in ["1", "*1"] and key["calibration"] == "reflectance": data *= 100 factor *= 100 # used for valid_min/max - data.attrs['units'] = '%' + data.attrs["units"] = "%" # set up all the attributes that might be useful to the 
user/satpy - data.attrs.update({'platform_name': self.platform_name, - 'sensor': data.attrs.get('sensor', self.sensor), + data.attrs.update({"platform_name": self.platform_name, + "sensor": data.attrs.get("sensor", self.sensor), }) - if 'satellite_longitude' in self.nc.attrs: - data.attrs['orbital_parameters'] = { - 'projection_longitude': self.nc.attrs['satellite_longitude'], - 'projection_latitude': self.nc.attrs['satellite_latitude'], - 'projection_altitude': self.nc.attrs['satellite_altitude'], + if "satellite_longitude" in self.nc.attrs: + data.attrs["orbital_parameters"] = { + "projection_longitude": self.nc.attrs["satellite_longitude"], + "projection_latitude": self.nc.attrs["satellite_latitude"], + "projection_altitude": self.nc.attrs["satellite_altitude"], } - scene_id = self.nc.attrs.get('scene_id') + scene_id = self.nc.attrs.get("scene_id") if scene_id is not None: - data.attrs['scene_id'] = scene_id + data.attrs["scene_id"] = scene_id data.attrs.update(key.to_dict()) - data.attrs.pop('_FillValue', None) - if 'valid_min' in data.attrs: - vmin = data.attrs.pop('valid_min') - vmax = data.attrs.pop('valid_max') + data.attrs.pop("_FillValue", None) + if "valid_min" in data.attrs: + vmin = data.attrs.pop("valid_min") + vmax = data.attrs.pop("valid_max") vmin = vmin * factor + offset vmax = vmax * factor + offset - data.attrs['valid_min'] = vmin - data.attrs['valid_max'] = vmax + data.attrs["valid_min"] = vmin + data.attrs["valid_max"] = vmax return data def _get_cf_grid_mapping_var(self): """Figure out which grid mapping should be used.""" - gmaps = ['fixedgrid_projection', 'goes_imager_projection', - 'lambert_projection', 'polar_projection', - 'mercator_projection'] - if 'grid_mapping' in self.filename_info: - gmaps = [self.filename_info.get('grid_mapping')] + gmaps + gmaps = ["fixedgrid_projection", "goes_imager_projection", + "lambert_projection", "polar_projection", + "mercator_projection"] + if "grid_mapping" in self.filename_info: + gmaps = [self.filename_info.get("grid_mapping")] + gmaps for grid_mapping in gmaps: if grid_mapping in self.nc: return self.nc[grid_mapping] @@ -192,12 +192,12 @@ def _get_cf_grid_mapping_var(self): def _get_proj4_name(self, projection): """Map CF projection name to PROJ.4 name.""" - gmap_name = projection.attrs['grid_mapping_name'] + gmap_name = projection.attrs["grid_mapping_name"] proj = { - 'geostationary': 'geos', - 'lambert_conformal_conic': 'lcc', - 'polar_stereographic': 'stere', - 'mercator': 'merc', + "geostationary": "geos", + "lambert_conformal_conic": "lcc", + "polar_stereographic": "stere", + "mercator": "merc", }.get(gmap_name, gmap_name) return proj @@ -205,42 +205,42 @@ def _get_proj_specific_params(self, projection): """Convert CF projection parameters to PROJ.4 dict.""" proj = self._get_proj4_name(projection) proj_dict = { - 'proj': proj, - 'a': float(projection.attrs['semi_major_axis']), - 'b': float(projection.attrs['semi_minor_axis']), - 'units': 'm', + "proj": proj, + "a": float(projection.attrs["semi_major_axis"]), + "b": float(projection.attrs["semi_minor_axis"]), + "units": "m", } - if proj == 'geos': - proj_dict['h'] = float(projection.attrs['perspective_point_height']) - proj_dict['sweep'] = projection.attrs.get('sweep_angle_axis', 'y') - proj_dict['lon_0'] = float(projection.attrs['longitude_of_projection_origin']) - proj_dict['lat_0'] = float(projection.attrs.get('latitude_of_projection_origin', 0.0)) - elif proj == 'lcc': - proj_dict['lat_0'] = float(projection.attrs['standard_parallel']) - proj_dict['lon_0'] = 
float(projection.attrs['longitude_of_central_meridian']) - proj_dict['lat_1'] = float(projection.attrs['latitude_of_projection_origin']) - elif proj == 'stere': - proj_dict['lat_ts'] = float(projection.attrs['standard_parallel']) - proj_dict['lon_0'] = float(projection.attrs['straight_vertical_longitude_from_pole']) - proj_dict['lat_0'] = float(projection.attrs['latitude_of_projection_origin']) - elif proj == 'merc': - proj_dict['lat_ts'] = float(projection.attrs['standard_parallel']) - proj_dict['lat_0'] = proj_dict['lat_ts'] - proj_dict['lon_0'] = float(projection.attrs['longitude_of_projection_origin']) + if proj == "geos": + proj_dict["h"] = float(projection.attrs["perspective_point_height"]) + proj_dict["sweep"] = projection.attrs.get("sweep_angle_axis", "y") + proj_dict["lon_0"] = float(projection.attrs["longitude_of_projection_origin"]) + proj_dict["lat_0"] = float(projection.attrs.get("latitude_of_projection_origin", 0.0)) + elif proj == "lcc": + proj_dict["lat_0"] = float(projection.attrs["standard_parallel"]) + proj_dict["lon_0"] = float(projection.attrs["longitude_of_central_meridian"]) + proj_dict["lat_1"] = float(projection.attrs["latitude_of_projection_origin"]) + elif proj == "stere": + proj_dict["lat_ts"] = float(projection.attrs["standard_parallel"]) + proj_dict["lon_0"] = float(projection.attrs["straight_vertical_longitude_from_pole"]) + proj_dict["lat_0"] = float(projection.attrs["latitude_of_projection_origin"]) + elif proj == "merc": + proj_dict["lat_ts"] = float(projection.attrs["standard_parallel"]) + proj_dict["lat_0"] = proj_dict["lat_ts"] + proj_dict["lon_0"] = float(projection.attrs["longitude_of_projection_origin"]) else: raise ValueError("Can't handle projection '{}'".format(proj)) return proj_dict def _calc_extents(self, proj_dict): """Calculate area extents from x/y variables.""" - h = float(proj_dict.get('h', 1.)) # force to 64-bit float - x = self['x'] - y = self['y'] - x_units = x.attrs.get('units', 'rad') - if x_units == 'meters': + h = float(proj_dict.get("h", 1.)) # force to 64-bit float + x = self["x"] + y = self["y"] + x_units = x.attrs.get("units", "rad") + if x_units == "meters": h_factor = 1. factor = 1. 
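The unit handling in ``_calc_extents`` (metres, microradian, or radians, continued in the branch below) amounts to scaling the stored x/y values by the satellite height and a unit factor before extents are computed; the downstream use of ``h_factor``/``factor`` is elided in this hunk. A rough illustration of the microradian case with made-up numbers:

.. code-block:: python

    import numpy as np

    # Hypothetical values; h comes from perspective_point_height in the file.
    h = 35786023.0
    x_microrad = np.array([-5434.0, 0.0, 5434.0])

    # microradian branch: h_factor = h, factor = 1e6, i.e. metres = x * h / 1e6
    x_metres = x_microrad * h / 1e6
    print(x_metres)  # roughly [-1.94e5, 0.0, 1.94e5] metres
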
- elif x_units == 'microradian': + elif x_units == "microradian": h_factor = h factor = 1e6 else: # radians @@ -260,7 +260,7 @@ def get_area_def(self, key): projection = self._get_cf_grid_mapping_var() proj_dict = self._get_proj_specific_params(projection) area_extent = self._calc_extents(proj_dict) - area_name = '{}_{}'.format(self.sensor, proj_dict['proj']) + area_name = "{}_{}".format(self.sensor, proj_dict["proj"]) return geometry.AreaDefinition( area_name, "SCMI file area", @@ -273,7 +273,7 @@ def get_area_def(self, key): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs['start_date_time'], '%Y%j%H%M%S') + return datetime.strptime(self.nc.attrs["start_date_time"], "%Y%j%H%M%S") @property def end_time(self): diff --git a/satpy/readers/seadas_l2.py b/satpy/readers/seadas_l2.py index 281a0132af..03fa648330 100644 --- a/satpy/readers/seadas_l2.py +++ b/satpy/readers/seadas_l2.py @@ -57,9 +57,9 @@ def _rows_per_scan(self): def _platform_name(self): platform = self[self.platform_attr_name] - platform_dict = {'NPP': 'Suomi-NPP', - 'JPSS-1': 'NOAA-20', - 'JPSS-2': 'NOAA-21'} + platform_dict = {"NPP": "Suomi-NPP", + "JPSS-1": "NOAA-20", + "JPSS-2": "NOAA-21"} return platform_dict.get(platform, platform) @property diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index de52c24487..25e6ed1a8b 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -200,14 +200,14 @@ CHUNK_SIZE = get_legacy_chunk_size() PLATFORM_DICT = { - 'MET08': 'Meteosat-8', - 'MET09': 'Meteosat-9', - 'MET10': 'Meteosat-10', - 'MET11': 'Meteosat-11', - 'MSG1': 'Meteosat-8', - 'MSG2': 'Meteosat-9', - 'MSG3': 'Meteosat-10', - 'MSG4': 'Meteosat-11', + "MET08": "Meteosat-8", + "MET09": "Meteosat-9", + "MET10": "Meteosat-10", + "MET11": "Meteosat-11", + "MSG1": "Meteosat-8", + "MSG2": "Meteosat-9", + "MSG3": "Meteosat-10", + "MSG4": "Meteosat-11", } REPEAT_CYCLE_DURATION = 15 @@ -233,19 +233,19 @@ 11: "IR_134", 12: "HRV"} -VIS_CHANNELS = ['HRV', 'VIS006', 'VIS008', 'IR_016'] +VIS_CHANNELS = ["HRV", "VIS006", "VIS008", "IR_016"] # Polynomial coefficients for spectral-effective BT fits BTFIT = dict() # [A, B, C] -BTFIT['IR_039'] = [0.0, 1.011751900, -3.550400] -BTFIT['WV_062'] = [0.00001805700, 1.000255533, -1.790930] -BTFIT['WV_073'] = [0.00000231818, 1.000668281, -0.456166] -BTFIT['IR_087'] = [-0.00002332000, 1.011803400, -1.507390] -BTFIT['IR_097'] = [-0.00002055330, 1.009370670, -1.030600] -BTFIT['IR_108'] = [-0.00007392770, 1.032889800, -3.296740] -BTFIT['IR_120'] = [-0.00007009840, 1.031314600, -3.181090] -BTFIT['IR_134'] = [-0.00007293450, 1.030424800, -2.645950] +BTFIT["IR_039"] = [0.0, 1.011751900, -3.550400] +BTFIT["WV_062"] = [0.00001805700, 1.000255533, -1.790930] +BTFIT["WV_073"] = [0.00000231818, 1.000668281, -0.456166] +BTFIT["IR_087"] = [-0.00002332000, 1.011803400, -1.507390] +BTFIT["IR_097"] = [-0.00002055330, 1.009370670, -1.030600] +BTFIT["IR_108"] = [-0.00007392770, 1.032889800, -3.296740] +BTFIT["IR_120"] = [-0.00007009840, 1.031314600, -3.181090] +BTFIT["IR_134"] = [-0.00007293450, 1.030424800, -2.645950] SATNUM = {321: "8", 322: "9", @@ -255,124 +255,124 @@ CALIB = dict() # Meteosat 8 -CALIB[321] = {'HRV': {'F': 78.7599}, - 'VIS006': {'F': 65.2296}, - 'VIS008': {'F': 73.0127}, - 'IR_016': {'F': 62.3715}, - 'IR_039': {'VC': 2567.33, - 'ALPHA': 0.9956, - 'BETA': 3.41}, - 'WV_062': {'VC': 1598.103, - 'ALPHA': 0.9962, - 'BETA': 2.218}, - 'WV_073': {'VC': 1362.081, - 'ALPHA': 0.9991, - 'BETA': 0.478}, - 'IR_087': {'VC': 
1149.069, - 'ALPHA': 0.9996, - 'BETA': 0.179}, - 'IR_097': {'VC': 1034.343, - 'ALPHA': 0.9999, - 'BETA': 0.06}, - 'IR_108': {'VC': 930.647, - 'ALPHA': 0.9983, - 'BETA': 0.625}, - 'IR_120': {'VC': 839.66, - 'ALPHA': 0.9988, - 'BETA': 0.397}, - 'IR_134': {'VC': 752.387, - 'ALPHA': 0.9981, - 'BETA': 0.578}} +CALIB[321] = {"HRV": {"F": 78.7599}, + "VIS006": {"F": 65.2296}, + "VIS008": {"F": 73.0127}, + "IR_016": {"F": 62.3715}, + "IR_039": {"VC": 2567.33, + "ALPHA": 0.9956, + "BETA": 3.41}, + "WV_062": {"VC": 1598.103, + "ALPHA": 0.9962, + "BETA": 2.218}, + "WV_073": {"VC": 1362.081, + "ALPHA": 0.9991, + "BETA": 0.478}, + "IR_087": {"VC": 1149.069, + "ALPHA": 0.9996, + "BETA": 0.179}, + "IR_097": {"VC": 1034.343, + "ALPHA": 0.9999, + "BETA": 0.06}, + "IR_108": {"VC": 930.647, + "ALPHA": 0.9983, + "BETA": 0.625}, + "IR_120": {"VC": 839.66, + "ALPHA": 0.9988, + "BETA": 0.397}, + "IR_134": {"VC": 752.387, + "ALPHA": 0.9981, + "BETA": 0.578}} # Meteosat 9 -CALIB[322] = {'HRV': {'F': 79.0113}, - 'VIS006': {'F': 65.2065}, - 'VIS008': {'F': 73.1869}, - 'IR_016': {'F': 61.9923}, - 'IR_039': {'VC': 2568.832, - 'ALPHA': 0.9954, - 'BETA': 3.438}, - 'WV_062': {'VC': 1600.548, - 'ALPHA': 0.9963, - 'BETA': 2.185}, - 'WV_073': {'VC': 1360.330, - 'ALPHA': 0.9991, - 'BETA': 0.47}, - 'IR_087': {'VC': 1148.620, - 'ALPHA': 0.9996, - 'BETA': 0.179}, - 'IR_097': {'VC': 1035.289, - 'ALPHA': 0.9999, - 'BETA': 0.056}, - 'IR_108': {'VC': 931.7, - 'ALPHA': 0.9983, - 'BETA': 0.64}, - 'IR_120': {'VC': 836.445, - 'ALPHA': 0.9988, - 'BETA': 0.408}, - 'IR_134': {'VC': 751.792, - 'ALPHA': 0.9981, - 'BETA': 0.561}} +CALIB[322] = {"HRV": {"F": 79.0113}, + "VIS006": {"F": 65.2065}, + "VIS008": {"F": 73.1869}, + "IR_016": {"F": 61.9923}, + "IR_039": {"VC": 2568.832, + "ALPHA": 0.9954, + "BETA": 3.438}, + "WV_062": {"VC": 1600.548, + "ALPHA": 0.9963, + "BETA": 2.185}, + "WV_073": {"VC": 1360.330, + "ALPHA": 0.9991, + "BETA": 0.47}, + "IR_087": {"VC": 1148.620, + "ALPHA": 0.9996, + "BETA": 0.179}, + "IR_097": {"VC": 1035.289, + "ALPHA": 0.9999, + "BETA": 0.056}, + "IR_108": {"VC": 931.7, + "ALPHA": 0.9983, + "BETA": 0.64}, + "IR_120": {"VC": 836.445, + "ALPHA": 0.9988, + "BETA": 0.408}, + "IR_134": {"VC": 751.792, + "ALPHA": 0.9981, + "BETA": 0.561}} # Meteosat 10 -CALIB[323] = {'HRV': {'F': 78.9416}, - 'VIS006': {'F': 65.5148}, - 'VIS008': {'F': 73.1807}, - 'IR_016': {'F': 62.0208}, - 'IR_039': {'VC': 2547.771, - 'ALPHA': 0.9915, - 'BETA': 2.9002}, - 'WV_062': {'VC': 1595.621, - 'ALPHA': 0.9960, - 'BETA': 2.0337}, - 'WV_073': {'VC': 1360.337, - 'ALPHA': 0.9991, - 'BETA': 0.4340}, - 'IR_087': {'VC': 1148.130, - 'ALPHA': 0.9996, - 'BETA': 0.1714}, - 'IR_097': {'VC': 1034.715, - 'ALPHA': 0.9999, - 'BETA': 0.0527}, - 'IR_108': {'VC': 929.842, - 'ALPHA': 0.9983, - 'BETA': 0.6084}, - 'IR_120': {'VC': 838.659, - 'ALPHA': 0.9988, - 'BETA': 0.3882}, - 'IR_134': {'VC': 750.653, - 'ALPHA': 0.9982, - 'BETA': 0.5390}} +CALIB[323] = {"HRV": {"F": 78.9416}, + "VIS006": {"F": 65.5148}, + "VIS008": {"F": 73.1807}, + "IR_016": {"F": 62.0208}, + "IR_039": {"VC": 2547.771, + "ALPHA": 0.9915, + "BETA": 2.9002}, + "WV_062": {"VC": 1595.621, + "ALPHA": 0.9960, + "BETA": 2.0337}, + "WV_073": {"VC": 1360.337, + "ALPHA": 0.9991, + "BETA": 0.4340}, + "IR_087": {"VC": 1148.130, + "ALPHA": 0.9996, + "BETA": 0.1714}, + "IR_097": {"VC": 1034.715, + "ALPHA": 0.9999, + "BETA": 0.0527}, + "IR_108": {"VC": 929.842, + "ALPHA": 0.9983, + "BETA": 0.6084}, + "IR_120": {"VC": 838.659, + "ALPHA": 0.9988, + "BETA": 0.3882}, + "IR_134": {"VC": 750.653, + "ALPHA": 0.9982, + 
"BETA": 0.5390}} # Meteosat 11 -CALIB[324] = {'HRV': {'F': 79.0035}, - 'VIS006': {'F': 65.2656}, - 'VIS008': {'F': 73.1692}, - 'IR_016': {'F': 61.9416}, - 'IR_039': {'VC': 2555.280, - 'ALPHA': 0.9916, - 'BETA': 2.9438}, - 'WV_062': {'VC': 1596.080, - 'ALPHA': 0.9959, - 'BETA': 2.0780}, - 'WV_073': {'VC': 1361.748, - 'ALPHA': 0.9990, - 'BETA': 0.4929}, - 'IR_087': {'VC': 1147.433, - 'ALPHA': 0.9996, - 'BETA': 0.1731}, - 'IR_097': {'VC': 1034.851, - 'ALPHA': 0.9998, - 'BETA': 0.0597}, - 'IR_108': {'VC': 931.122, - 'ALPHA': 0.9983, - 'BETA': 0.6256}, - 'IR_120': {'VC': 839.113, - 'ALPHA': 0.9988, - 'BETA': 0.4002}, - 'IR_134': {'VC': 748.585, - 'ALPHA': 0.9981, - 'BETA': 0.5635}} +CALIB[324] = {"HRV": {"F": 79.0035}, + "VIS006": {"F": 65.2656}, + "VIS008": {"F": 73.1692}, + "IR_016": {"F": 61.9416}, + "IR_039": {"VC": 2555.280, + "ALPHA": 0.9916, + "BETA": 2.9438}, + "WV_062": {"VC": 1596.080, + "ALPHA": 0.9959, + "BETA": 2.0780}, + "WV_073": {"VC": 1361.748, + "ALPHA": 0.9990, + "BETA": 0.4929}, + "IR_087": {"VC": 1147.433, + "ALPHA": 0.9996, + "BETA": 0.1731}, + "IR_097": {"VC": 1034.851, + "ALPHA": 0.9998, + "BETA": 0.0597}, + "IR_108": {"VC": 931.122, + "ALPHA": 0.9983, + "BETA": 0.6256}, + "IR_120": {"VC": 839.113, + "ALPHA": 0.9988, + "BETA": 0.4002}, + "IR_134": {"VC": 748.585, + "ALPHA": 0.9981, + "BETA": 0.5635}} # Calibration coefficients from Meirink, J.F., R.A. Roebeling and P. Stammes, 2013: # Inter-calibration of polar imager solar channels using SEVIRI, Atm. Meas. Tech., 6, @@ -390,34 +390,34 @@ MEIRINK_EPOCH = datetime(2000, 1, 1) MEIRINK_COEFS: dict[str, dict[int, dict[str, tuple[float, float]]]] = {} -MEIRINK_COEFS['2023'] = {} +MEIRINK_COEFS["2023"] = {} # Meteosat-8 -MEIRINK_COEFS['2023'][321] = {'VIS006': (24.346, 0.3739), - 'VIS008': (30.989, 0.3111), - 'IR_016': (22.869, 0.0065) +MEIRINK_COEFS["2023"][321] = {"VIS006": (24.346, 0.3739), + "VIS008": (30.989, 0.3111), + "IR_016": (22.869, 0.0065) } # Meteosat-9 -MEIRINK_COEFS['2023'][322] = {'VIS006': (21.026, 0.2556), - 'VIS008': (26.875, 0.1835), - 'IR_016': (21.394, 0.0498) +MEIRINK_COEFS["2023"][322] = {"VIS006": (21.026, 0.2556), + "VIS008": (26.875, 0.1835), + "IR_016": (21.394, 0.0498) } # Meteosat-10 -MEIRINK_COEFS['2023'][323] = {'VIS006': (19.829, 0.5856), - 'VIS008': (25.284, 0.6787), - 'IR_016': (23.066, -0.0286) +MEIRINK_COEFS["2023"][323] = {"VIS006": (19.829, 0.5856), + "VIS008": (25.284, 0.6787), + "IR_016": (23.066, -0.0286) } # Meteosat-11 -MEIRINK_COEFS['2023'][324] = {'VIS006': (20.515, 0.3600), - 'VIS008': (25.803, 0.4844), - 'IR_016': (22.354, -0.0187) +MEIRINK_COEFS["2023"][324] = {"VIS006": (20.515, 0.3600), + "VIS008": (25.803, 0.4844), + "IR_016": (22.354, -0.0187) } @@ -440,7 +440,7 @@ def get_meirink_slope(meirink_coefs, acquisition_time): def should_apply_meirink(calib_mode, channel_name): """Decide whether to use the Meirink calibration coefficients.""" - return "MEIRINK" in calib_mode and channel_name in ['VIS006', 'VIS008', 'IR_016'] + return "MEIRINK" in calib_mode and channel_name in ["VIS006", "VIS008", "IR_016"] class MeirinkCalibrationHandler: @@ -448,7 +448,7 @@ class MeirinkCalibrationHandler: def __init__(self, calib_mode): """Initialize the calibration handler.""" - self.coefs = MEIRINK_COEFS[calib_mode.split('-')[1]] + self.coefs = MEIRINK_COEFS[calib_mode.split("-")[1]] def get_slope(self, platform, channel, time): """Return the slope using the provided calibration coefficients.""" @@ -472,12 +472,12 @@ def get_cds_time(days, msecs): """ if np.isscalar(days): - days = 
np.array([days], dtype='int64') - msecs = np.array([msecs], dtype='int64') + days = np.array([days], dtype="int64") + msecs = np.array([msecs], dtype="int64") - time = np.datetime64('1958-01-01').astype('datetime64[ms]') + \ - days.astype('timedelta64[D]') + msecs.astype('timedelta64[ms]') - time[time == np.datetime64('1958-01-01 00:00')] = np.datetime64("NaT") + time = np.datetime64("1958-01-01").astype("datetime64[ms]") + \ + days.astype("timedelta64[D]") + msecs.astype("timedelta64[ms]") + time[time == np.datetime64("1958-01-01 00:00")] = np.datetime64("NaT") if len(time) == 1: return time[0] @@ -486,9 +486,9 @@ def get_cds_time(days, msecs): def add_scanline_acq_time(dataset, acq_time): """Add scanline acquisition time to the given dataset.""" - dataset.coords['acq_time'] = ('y', acq_time) - dataset.coords['acq_time'].attrs[ - 'long_name'] = 'Mean scanline acquisition time' + dataset.coords["acq_time"] = ("y", acq_time) + dataset.coords["acq_time"].attrs[ + "long_name"] = "Mean scanline acquisition time" def dec10216(inbuf): @@ -538,53 +538,53 @@ class MpefProductHeader(object): def get(self): """Return numpy record_array for MPEF product header.""" record = [ - ('MPEF_File_Id', np.int16), - ('MPEF_Header_Version', np.uint8), - ('ManualDissAuthRequest', bool), - ('ManualDisseminationAuth', bool), - ('DisseminationAuth', bool), - ('NominalTime', time_cds_short), - ('ProductQuality', np.uint8), - ('ProductCompleteness', np.uint8), - ('ProductTimeliness', np.uint8), - ('ProcessingInstanceId', np.int8), - ('ImagesUsed', self.images_used, (4,)), - ('BaseAlgorithmVersion', + ("MPEF_File_Id", np.int16), + ("MPEF_Header_Version", np.uint8), + ("ManualDissAuthRequest", bool), + ("ManualDisseminationAuth", bool), + ("DisseminationAuth", bool), + ("NominalTime", time_cds_short), + ("ProductQuality", np.uint8), + ("ProductCompleteness", np.uint8), + ("ProductTimeliness", np.uint8), + ("ProcessingInstanceId", np.int8), + ("ImagesUsed", self.images_used, (4,)), + ("BaseAlgorithmVersion", issue_revision), - ('ProductAlgorithmVersion', + ("ProductAlgorithmVersion", issue_revision), - ('InstanceServerName', 'S2'), - ('SpacecraftName', 'S2'), - ('Mission', 'S3'), - ('RectificationLongitude', 'S5'), - ('Encoding', 'S1'), - ('TerminationSpace', 'S1'), - ('EncodingVersion', np.uint16), - ('Channel', np.uint8), - ('ImageLocation', 'S3'), - ('GsicsCalMode', np.bool_), - ('GsicsCalValidity', np.bool_), - ('Padding', 'S2'), - ('OffsetToData', np.uint32), - ('Padding2', 'S9'), - ('RepeatCycle', 'S15'), + ("InstanceServerName", "S2"), + ("SpacecraftName", "S2"), + ("Mission", "S3"), + ("RectificationLongitude", "S5"), + ("Encoding", "S1"), + ("TerminationSpace", "S1"), + ("EncodingVersion", np.uint16), + ("Channel", np.uint8), + ("ImageLocation", "S3"), + ("GsicsCalMode", np.bool_), + ("GsicsCalValidity", np.bool_), + ("Padding", "S2"), + ("OffsetToData", np.uint32), + ("Padding2", "S9"), + ("RepeatCycle", "S15"), ] - return np.dtype(record).newbyteorder('>') + return np.dtype(record).newbyteorder(">") @property def images_used(self): """Return structure for images_used.""" record = [ - ('Padding1', 'S2'), - ('ExpectedImage', time_cds_short), - ('ImageReceived', bool), - ('Padding2', 'S1'), - ('UsedImageStart_Day', np.uint16), - ('UsedImageStart_Millsec', np.uint32), - ('Padding3', 'S2'), - ('UsedImageEnd_Day', np.uint16), - ('UsedImageEndt_Millsec', np.uint32), + ("Padding1", "S2"), + ("ExpectedImage", time_cds_short), + ("ImageReceived", bool), + ("Padding2", "S1"), + ("UsedImageStart_Day", np.uint16), + 
("UsedImageStart_Millsec", np.uint32), + ("Padding3", "S2"), + ("UsedImageEnd_Day", np.uint16), + ("UsedImageEndt_Millsec", np.uint32), ] return record @@ -624,7 +624,7 @@ def ir_calibrate(self, data, channel_name, cal_type): # effective radiances return self._erads2bt(data, channel_name) else: - raise NotImplementedError('Unknown calibration type') + raise NotImplementedError("Unknown calibration type") def _srads2bt(self, data, channel_name): """Convert spectral radiance to brightness temperature.""" @@ -668,36 +668,36 @@ def __init__(self, platform_id, channel_name, coefs, calib_mode, scan_time): scan_time=self._scan_time ) - valid_modes = ('NOMINAL', 'GSICS', 'MEIRINK-2023') + valid_modes = ("NOMINAL", "GSICS", "MEIRINK-2023") if self._calib_mode not in valid_modes: raise ValueError( - 'Invalid calibration mode: {}. Choose one of {}'.format( + "Invalid calibration mode: {}. Choose one of {}".format( self._calib_mode, valid_modes) ) def calibrate(self, data, calibration): """Calibrate the given data.""" - if calibration == 'counts': + if calibration == "counts": res = data - elif calibration in ['radiance', 'reflectance', - 'brightness_temperature']: + elif calibration in ["radiance", "reflectance", + "brightness_temperature"]: gain, offset = self.get_gain_offset() res = self._algo.convert_to_radiance( data.astype(np.float32), gain, offset ) else: raise ValueError( - 'Invalid calibration {} for channel {}'.format( + "Invalid calibration {} for channel {}".format( calibration, self._channel_name ) ) - if calibration == 'reflectance': + if calibration == "reflectance": solar_irradiance = CALIB[self._platform_id][self._channel_name]["F"] res = self._algo.vis_calibrate(res, solar_irradiance) - elif calibration == 'brightness_temperature': + elif calibration == "brightness_temperature": res = self._algo.ir_calibrate( - res, self._channel_name, self._coefs['radiance_type'] + res, self._channel_name, self._coefs["radiance_type"] ) return res @@ -710,14 +710,14 @@ def get_gain_offset(self): nominal coefficients. External coefficients take precedence over internal coefficients. """ - coefs = self._coefs['coefs'] + coefs = self._coefs["coefs"] # Select internal coefficients for the given calibration mode - internal_gain = coefs['NOMINAL']['gain'] - internal_offset = coefs['NOMINAL']['offset'] - if self._calib_mode == 'GSICS': - gsics_gain = coefs['GSICS']['gain'] - gsics_offset = coefs['GSICS']['offset'] * gsics_gain + internal_gain = coefs["NOMINAL"]["gain"] + internal_offset = coefs["NOMINAL"]["offset"] + if self._calib_mode == "GSICS": + gsics_gain = coefs["GSICS"]["gain"] + gsics_offset = coefs["GSICS"]["offset"] * gsics_gain if gsics_gain != 0 and gsics_offset != 0: # If no GSICS coefficients are available for a certain channel, # they are set to zero in the file. @@ -729,8 +729,8 @@ def get_gain_offset(self): internal_gain = meirink.get_slope(self._platform_id, self._channel_name, self._scan_time) # Override with external coefficients, if any. 
- gain = coefs['EXTERNAL'].get('gain', internal_gain) - offset = coefs['EXTERNAL'].get('offset', internal_offset) + gain = coefs["EXTERNAL"].get("gain", internal_gain) + offset = coefs["EXTERNAL"].get("offset", internal_offset) return gain, offset @@ -795,9 +795,9 @@ def evaluate(self, time): Returns: Earth-centered cartesian coordinates (x, y, z) in meters """ - domain = [np.datetime64(self.start_time).astype('int64'), - np.datetime64(self.end_time).astype('int64')] - time = np.datetime64(time).astype('int64') + domain = [np.datetime64(self.start_time).astype("int64"), + np.datetime64(self.end_time).astype("int64")] + time = np.datetime64(time).astype("int64") x, y, z = chebyshev_3d(self.coefs, time, domain) return x * 1000, y * 1000, z * 1000 # km -> m @@ -824,10 +824,10 @@ def get_satpos(orbit_polynomial, time, semi_major_axis, semi_minor_axis): """ x, y, z = orbit_polynomial.evaluate(time) geocent = pyproj.CRS( - proj='geocent', a=semi_major_axis, b=semi_minor_axis, units='m' + proj="geocent", a=semi_major_axis, b=semi_minor_axis, units="m" ) latlong = pyproj.CRS( - proj='latlong', a=semi_major_axis, b=semi_minor_axis, units='m' + proj="latlong", a=semi_major_axis, b=semi_minor_axis, units="m" ) transformer = pyproj.Transformer.from_crs(geocent, latlong) lon, lat, alt = transformer.transform(x, y, z) @@ -856,10 +856,10 @@ def __init__(self, orbit_polynomials): self.orbit_polynomials = orbit_polynomials # Left/right boundaries of time intervals for which the polynomials are # valid. - self.valid_from = orbit_polynomials['StartTime'][0, :].astype( - 'datetime64[us]') - self.valid_to = orbit_polynomials['EndTime'][0, :].astype( - 'datetime64[us]') + self.valid_from = orbit_polynomials["StartTime"][0, :].astype( + "datetime64[us]") + self.valid_to = orbit_polynomials["EndTime"][0, :].astype( + "datetime64[us]") def get_orbit_polynomial(self, time, max_delta=6): """Get orbit polynomial valid for the given time. @@ -888,16 +888,16 @@ def get_orbit_polynomial(self, time, max_delta=6): match = self._get_enclosing_interval(time) except ValueError: warnings.warn( - 'No orbit polynomial valid for {}. Using closest ' - 'match.'.format(time), + "No orbit polynomial valid for {}. 
Using closest " + "match.".format(time), stacklevel=2 ) match = self._get_closest_interval_within(time, max_delta) return OrbitPolynomial( coefs=( - self.orbit_polynomials['X'][match], - self.orbit_polynomials['Y'][match], - self.orbit_polynomials['Z'][match] + self.orbit_polynomials["X"][match], + self.orbit_polynomials["Y"][match], + self.orbit_polynomials["Z"][match] ), start_time=self.valid_from[match], end_time=self.valid_to[match] @@ -925,12 +925,12 @@ def _get_closest_interval_within(self, time, threshold): Index of closest interval """ closest_match, distance = self._get_closest_interval(time) - threshold_diff = np.timedelta64(threshold, 'h') + threshold_diff = np.timedelta64(threshold, "h") if distance < threshold_diff: return closest_match raise NoValidOrbitParams( - 'Unable to find orbit coefficients valid for {} +/- {}' - 'hours'.format(time, threshold) + "Unable to find orbit coefficients valid for {} +/- {}" + "hours".format(time, threshold) ) def _get_closest_interval(self, time): @@ -942,7 +942,7 @@ def _get_closest_interval(self, time): intervals_centre = self.valid_from + 0.5 * ( self.valid_to - self.valid_from ) - diffs_us = (time - intervals_centre).astype('i8') + diffs_us = (time - intervals_centre).astype("i8") closest_match = np.argmin(np.fabs(diffs_us)) distance = abs(intervals_centre[closest_match] - time) return closest_match, distance @@ -963,6 +963,7 @@ def calculate_area_extent(area_dict): line_step: Pixel resolution in meters in south-north direction [column_offset: Column offset, defaults to 0 if not given] [line_offset: Line offset, defaults to 0 if not given] + Returns: tuple: An area extent for the scene defined by the lower left and upper right corners @@ -970,15 +971,15 @@ def calculate_area_extent(area_dict): # For Earth model 2 and full disk VISIR, (center_point - west - 0.5 + we_offset) must be -1856.5 . # See MSG Level 1.5 Image Data Format Description Figure 7 - Alignment and numbering of the non-HRV pixels. 
""" - center_point = area_dict['center_point'] - east = area_dict['east'] - west = area_dict['west'] - south = area_dict['south'] - north = area_dict['north'] - column_step = area_dict['column_step'] - line_step = area_dict['line_step'] - column_offset = area_dict.get('column_offset', 0) - line_offset = area_dict.get('line_offset', 0) + center_point = area_dict["center_point"] + east = area_dict["east"] + west = area_dict["west"] + south = area_dict["south"] + north = area_dict["north"] + column_step = area_dict["column_step"] + line_step = area_dict["line_step"] + column_offset = area_dict.get("column_offset", 0) + line_offset = area_dict.get("line_offset", 0) ll_c = (center_point - east + 0.5 + column_offset) * column_step ll_l = (north - center_point + 0.5 + line_offset) * line_step @@ -991,18 +992,18 @@ def calculate_area_extent(area_dict): def create_coef_dict(coefs_nominal, coefs_gsics, radiance_type, ext_coefs): """Create coefficient dictionary expected by calibration class.""" return { - 'coefs': { - 'NOMINAL': { - 'gain': coefs_nominal[0], - 'offset': coefs_nominal[1], + "coefs": { + "NOMINAL": { + "gain": coefs_nominal[0], + "offset": coefs_nominal[1], }, - 'GSICS': { - 'gain': coefs_gsics[0], - 'offset': coefs_gsics[1] + "GSICS": { + "gain": coefs_gsics[0], + "offset": coefs_gsics[1] }, - 'EXTERNAL': ext_coefs + "EXTERNAL": ext_coefs }, - 'radiance_type': radiance_type + "radiance_type": radiance_type } @@ -1022,7 +1023,7 @@ def pad_data_horizontally(data, final_size, east_bound, west_bound): """Pad the data given east and west bounds and the desired size.""" nlines = final_size[0] if west_bound - east_bound != data.shape[1] - 1: - raise IndexError('East and west bounds do not match data shape') + raise IndexError("East and west bounds do not match data shape") padding_east = get_padding_area((nlines, east_bound - 1), data.dtype) padding_west = get_padding_area((nlines, (final_size[1] - west_bound)), data.dtype) @@ -1034,7 +1035,7 @@ def pad_data_vertically(data, final_size, south_bound, north_bound): """Pad the data given south and north bounds and the desired size.""" ncols = final_size[1] if north_bound - south_bound != data.shape[0] - 1: - raise IndexError('South and north bounds do not match data shape') + raise IndexError("South and north bounds do not match data shape") padding_south = get_padding_area((south_bound - 1, ncols), data.dtype) padding_north = get_padding_area(((final_size[0] - north_bound), ncols), data.dtype) diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 2b153edfcc..3b3aa82277 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -84,8 +84,8 @@ } -Example -------- +Example: +-------- Here is an example how to read the data in satpy: .. 
code-block:: python @@ -252,33 +252,33 @@ from satpy.utils import get_legacy_chunk_size CHUNK_SIZE = get_legacy_chunk_size() -logger = logging.getLogger('hrit_msg') +logger = logging.getLogger("hrit_msg") # MSG implementation: -key_header = np.dtype([('key_number', 'u1'), - ('seed', '>f8')]) - -segment_identification = np.dtype([('GP_SC_ID', '>i2'), - ('spectral_channel_id', '>i1'), - ('segment_sequence_number', '>u2'), - ('planned_start_segment_number', '>u2'), - ('planned_end_segment_number', '>u2'), - ('data_field_representation', '>i1')]) - -image_segment_line_quality = np.dtype([('line_number_in_grid', '>i4'), - ('line_mean_acquisition', - [('days', '>u2'), - ('milliseconds', '>u4')]), - ('line_validity', 'u1'), - ('line_radiometric_quality', 'u1'), - ('line_geometric_quality', 'u1')]) +key_header = np.dtype([("key_number", "u1"), + ("seed", ">f8")]) + +segment_identification = np.dtype([("GP_SC_ID", ">i2"), + ("spectral_channel_id", ">i1"), + ("segment_sequence_number", ">u2"), + ("planned_start_segment_number", ">u2"), + ("planned_end_segment_number", ">u2"), + ("data_field_representation", ">i1")]) + +image_segment_line_quality = np.dtype([("line_number_in_grid", ">i4"), + ("line_mean_acquisition", + [("days", ">u2"), + ("milliseconds", ">u4")]), + ("line_validity", "u1"), + ("line_radiometric_quality", "u1"), + ("line_geometric_quality", "u1")]) msg_variable_length_headers = { - image_segment_line_quality: 'image_segment_line_quality'} + image_segment_line_quality: "image_segment_line_quality"} -msg_text_headers = {image_data_function: 'image_data_function', - annotation_header: 'annotation_header', - ancillary_text: 'ancillary_text'} +msg_text_headers = {image_data_function: "image_data_function", + annotation_header: "annotation_header", + ancillary_text: "ancillary_text"} msg_hdr_map = base_hdr_map.copy() msg_hdr_map.update({7: key_header, @@ -287,23 +287,23 @@ }) -orbit_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('X', '>f8', (8, )), - ('Y', '>f8', (8, )), - ('Z', '>f8', (8, )), - ('VX', '>f8', (8, )), - ('VY', '>f8', (8, )), - ('VZ', '>f8', (8, ))]) +orbit_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("X", ">f8", (8, )), + ("Y", ">f8", (8, )), + ("Z", ">f8", (8, )), + ("VX", ">f8", (8, )), + ("VY", ">f8", (8, )), + ("VZ", ">f8", (8, ))]) -attitude_coef = np.dtype([('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('XofSpinAxis', '>f8', (8, )), - ('YofSpinAxis', '>f8', (8, )), - ('ZofSpinAxis', '>f8', (8, ))]) +attitude_coef = np.dtype([("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("XofSpinAxis", ">f8", (8, )), + ("YofSpinAxis", ">f8", (8, )), + ("ZofSpinAxis", ">f8", (8, ))]) -cuc_time = np.dtype([('coarse', 'u1', (4, )), - ('fine', 'u1', (3, ))]) +cuc_time = np.dtype([("coarse", "u1", (4, )), + ("fine", "u1", (3, ))]) class HRITMSGPrologueEpilogueBase(HRITFileHandler): @@ -328,7 +328,7 @@ def reduce(self, max_size): class HRITMSGPrologueFileHandler(HRITMSGPrologueEpilogueBase): """SEVIRI HRIT prologue reader.""" - def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', + def __init__(self, filename, filename_info, filetype_info, calib_mode="nominal", ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=None, fill_hrv=None, mask_bad_quality_scan_lines=None): """Initialize the reader.""" @@ -340,22 +340,22 @@ def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', self.prologue = {} self.read_prologue() - 
service = filename_info['service'] - if service == '': - self.mda['service'] = '0DEG' + service = filename_info["service"] + if service == "": + self.mda["service"] = "0DEG" else: - self.mda['service'] = service + self.mda["service"] = service def read_prologue(self): """Read the prologue metadata.""" with utils.generic_open(self.filename, mode="rb") as fp_: - fp_.seek(self.mda['total_header_length']) + fp_.seek(self.mda["total_header_length"]) data = np.frombuffer(fp_.read(hrit_prologue.itemsize), dtype=hrit_prologue, count=1) self.prologue.update(recarray2dict(data)) try: impf = np.frombuffer(fp_.read(impf_configuration.itemsize), dtype=impf_configuration, count=1)[0] except ValueError: - logger.info('No IMPF configuration field found in prologue.') + logger.info("No IMPF configuration field found in prologue.") else: self.prologue.update(recarray2dict(impf)) @@ -368,8 +368,8 @@ def satpos(self): Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ a, b = self.get_earth_radii() - poly_finder = OrbitPolynomialFinder(self.prologue['SatelliteStatus'][ - 'Orbit']['OrbitPolynomial']) + poly_finder = OrbitPolynomialFinder(self.prologue["SatelliteStatus"][ + "Orbit"]["OrbitPolynomial"]) orbit_polynomial = poly_finder.get_orbit_polynomial(self.observation_start_time) return get_satpos( orbit_polynomial=orbit_polynomial, @@ -385,10 +385,10 @@ def get_earth_radii(self): Equatorial radius, polar radius [m] """ - earth_model = self.prologue['GeometricProcessing']['EarthModel'] - a = earth_model['EquatorialRadius'] * 1000 - b = (earth_model['NorthPolarRadius'] + - earth_model['SouthPolarRadius']) / 2.0 * 1000 + earth_model = self.prologue["GeometricProcessing"]["EarthModel"] + a = earth_model["EquatorialRadius"] * 1000 + b = (earth_model["NorthPolarRadius"] + + earth_model["SouthPolarRadius"]) / 2.0 * 1000 return a, b def reduce(self, max_size): @@ -399,7 +399,7 @@ def reduce(self, max_size): class HRITMSGEpilogueFileHandler(HRITMSGPrologueEpilogueBase): """SEVIRI HRIT epilogue reader.""" - def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', + def __init__(self, filename, filename_info, filetype_info, calib_mode="nominal", ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=None, fill_hrv=None, mask_bad_quality_scan_lines=None): """Initialize the reader.""" @@ -411,16 +411,16 @@ def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', self.epilogue = {} self.read_epilogue() - service = filename_info['service'] - if service == '': - self.mda['service'] = '0DEG' + service = filename_info["service"] + if service == "": + self.mda["service"] = "0DEG" else: - self.mda['service'] = service + self.mda["service"] = service def read_epilogue(self): """Read the epilogue metadata.""" with utils.generic_open(self.filename, mode="rb") as fp_: - fp_.seek(self.mda['total_header_length']) + fp_.seek(self.mda["total_header_length"]) data = np.frombuffer(fp_.read(hrit_epilogue.itemsize), dtype=hrit_epilogue, count=1) self.epilogue.update(recarray2dict(data)) @@ -454,7 +454,7 @@ class HRITMSGFileHandler(HRITFileHandler): """ def __init__(self, filename, filename_info, filetype_info, - prologue, epilogue, calib_mode='nominal', + prologue, epilogue, calib_mode="nominal", ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=100, fill_hrv=True, mask_bad_quality_scan_lines=True): @@ -480,27 +480,27 @@ def __init__(self, filename, filename_info, filetype_info, def _get_header(self): """Read the header info, and fill the 
metadata dictionary.""" - earth_model = self.prologue['GeometricProcessing']['EarthModel'] - self.mda['offset_corrected'] = earth_model['TypeOfEarthModel'] == 2 + earth_model = self.prologue["GeometricProcessing"]["EarthModel"] + self.mda["offset_corrected"] = earth_model["TypeOfEarthModel"] == 2 # Projection a, b = self.prologue_.get_earth_radii() - self.mda['projection_parameters']['a'] = a - self.mda['projection_parameters']['b'] = b - ssp = self.prologue['ImageDescription'][ - 'ProjectionDescription']['LongitudeOfSSP'] - self.mda['projection_parameters']['SSP_longitude'] = ssp - self.mda['projection_parameters']['SSP_latitude'] = 0.0 + self.mda["projection_parameters"]["a"] = a + self.mda["projection_parameters"]["b"] = b + ssp = self.prologue["ImageDescription"][ + "ProjectionDescription"]["LongitudeOfSSP"] + self.mda["projection_parameters"]["SSP_longitude"] = ssp + self.mda["projection_parameters"]["SSP_latitude"] = 0.0 # Orbital parameters - self.mda['orbital_parameters']['satellite_nominal_longitude'] = self.prologue['SatelliteStatus'][ - 'SatelliteDefinition']['NominalLongitude'] - self.mda['orbital_parameters']['satellite_nominal_latitude'] = 0.0 + self.mda["orbital_parameters"]["satellite_nominal_longitude"] = self.prologue["SatelliteStatus"][ + "SatelliteDefinition"]["NominalLongitude"] + self.mda["orbital_parameters"]["satellite_nominal_latitude"] = 0.0 try: actual_lon, actual_lat, actual_alt = self.prologue_.satpos - self.mda['orbital_parameters']['satellite_actual_longitude'] = actual_lon - self.mda['orbital_parameters']['satellite_actual_latitude'] = actual_lat - self.mda['orbital_parameters']['satellite_actual_altitude'] = actual_alt + self.mda["orbital_parameters"]["satellite_actual_longitude"] = actual_lon + self.mda["orbital_parameters"]["satellite_actual_latitude"] = actual_lat + self.mda["orbital_parameters"]["satellite_actual_altitude"] = actual_alt except NoValidOrbitParams as err: logger.warning(err) @@ -508,46 +508,46 @@ def _get_header(self): self.platform_id = self.prologue["SatelliteStatus"][ "SatelliteDefinition"]["SatelliteId"] self.platform_name = "Meteosat-" + SATNUM[self.platform_id] - self.mda['platform_name'] = self.platform_name - service = self._filename_info['service'] - if service == '': - self.mda['service'] = '0DEG' + self.mda["platform_name"] = self.platform_name + service = self._filename_info["service"] + if service == "": + self.mda["service"] = "0DEG" else: - self.mda['service'] = service - self.channel_name = CHANNEL_NAMES[self.mda['spectral_channel_id']] + self.mda["service"] = service + self.channel_name = CHANNEL_NAMES[self.mda["spectral_channel_id"]] @property def _repeat_cycle_duration(self): """Get repeat cycle duration from epilogue.""" - if self.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: + if self.epilogue["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] == 1: return 5 return REPEAT_CYCLE_DURATION @property def nominal_start_time(self): """Get the start time and round it according to scan law.""" - tm = self.prologue['ImageAcquisition'][ - 'PlannedAcquisitionTime']['TrueRepeatCycleStart'] + tm = self.prologue["ImageAcquisition"][ + "PlannedAcquisitionTime"]["TrueRepeatCycleStart"] return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Get the end time and round it according to scan law.""" - tm = self.prologue['ImageAcquisition'][ - 'PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] + tm = 
self.prologue["ImageAcquisition"][ + "PlannedAcquisitionTime"]["PlannedRepeatCycleEnd"] return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): """Get the observation start time.""" - return self.epilogue['ImageProductionStats'][ - 'ActualScanningSummary']['ForwardScanStart'] + return self.epilogue["ImageProductionStats"][ + "ActualScanningSummary"]["ForwardScanStart"] @property def observation_end_time(self): """Get the observation end time.""" - return self.epilogue['ImageProductionStats'][ - 'ActualScanningSummary']['ForwardScanEnd'] + return self.epilogue["ImageProductionStats"][ + "ActualScanningSummary"]["ForwardScanEnd"] @property def start_time(self): @@ -572,7 +572,7 @@ def _get_area_extent(self, pdict): """ aex = get_area_extent(pdict) - if not self.mda['offset_corrected']: + if not self.mda["offset_corrected"]: # Geo-referencing offset present. Adjust area extent to match the shifted data. Note that we have to adjust # the corners in the *opposite* direction, i.e. S-E. Think of it as if the coastlines were fixed, and you # dragged the image to S-E until coastlines and data area aligned correctly. @@ -589,80 +589,80 @@ def _get_area_extent(self, pdict): def get_area_def(self, dsid): """Get the area definition of the band.""" # Common parameters for both HRV and other channels - nlines = int(self.mda['number_of_lines']) - loff = np.float32(self.mda['loff']) + nlines = int(self.mda["number_of_lines"]) + loff = np.float32(self.mda["loff"]) pdict = dict() - pdict['cfac'] = np.int32(self.mda['cfac']) - pdict['lfac'] = np.int32(self.mda['lfac']) - pdict['coff'] = np.float32(self.mda['coff']) - - pdict['a'] = self.mda['projection_parameters']['a'] - pdict['b'] = self.mda['projection_parameters']['b'] - pdict['h'] = self.mda['projection_parameters']['h'] - pdict['ssp_lon'] = self.mda['projection_parameters']['SSP_longitude'] - - pdict['nlines'] = nlines - pdict['ncols'] = int(self.mda['number_of_columns']) - if (self.prologue['ImageDescription']['Level15ImageProduction'] - ['ImageProcDirection'] == 0): - pdict['scandir'] = 'N2S' + pdict["cfac"] = np.int32(self.mda["cfac"]) + pdict["lfac"] = np.int32(self.mda["lfac"]) + pdict["coff"] = np.float32(self.mda["coff"]) + + pdict["a"] = self.mda["projection_parameters"]["a"] + pdict["b"] = self.mda["projection_parameters"]["b"] + pdict["h"] = self.mda["projection_parameters"]["h"] + pdict["ssp_lon"] = self.mda["projection_parameters"]["SSP_longitude"] + + pdict["nlines"] = nlines + pdict["ncols"] = int(self.mda["number_of_columns"]) + if (self.prologue["ImageDescription"]["Level15ImageProduction"] + ["ImageProcDirection"] == 0): + pdict["scandir"] = "N2S" else: - pdict['scandir'] = 'S2N' + pdict["scandir"] = "S2N" - area_naming_input_dict = {'platform_name': 'msg', - 'instrument_name': 'seviri', - 'resolution': int(dsid['resolution']) + area_naming_input_dict = {"platform_name": "msg", + "instrument_name": "seviri", + "resolution": int(dsid["resolution"]) } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('seviri', pdict['ssp_lon'])}) + **get_service_mode("seviri", pdict["ssp_lon"])}) # Compute area definition for non-HRV channels: - if dsid['name'] != 'HRV': - pdict['loff'] = loff - nlines + if dsid["name"] != "HRV": + pdict["loff"] = loff - nlines aex = self._get_area_extent(pdict) - pdict['a_name'] = area_naming['area_id'] - pdict['a_desc'] = area_naming['description'] - pdict['p_id'] = "" + pdict["a_name"] = area_naming["area_id"] + 
pdict["a_desc"] = area_naming["description"] + pdict["p_id"] = "" area = get_area_definition(pdict, aex) self.area = area return self.area - segment_number = self.mda['segment_sequence_number'] + segment_number = self.mda["segment_sequence_number"] current_first_line = ((segment_number - - self.mda['planned_start_segment_number']) - * pdict['nlines']) + self.mda["planned_start_segment_number"]) + * pdict["nlines"]) # Or, if we are processing HRV: - pdict['a_name'] = area_naming['area_id'] - pdict['p_id'] = "" - bounds = self.epilogue['ImageProductionStats']['ActualL15CoverageHRV'].copy() + pdict["a_name"] = area_naming["area_id"] + pdict["p_id"] = "" + bounds = self.epilogue["ImageProductionStats"]["ActualL15CoverageHRV"].copy() if self.fill_hrv: - bounds['UpperEastColumnActual'] = 1 - bounds['UpperWestColumnActual'] = HRV_NUM_COLUMNS - bounds['LowerEastColumnActual'] = 1 - bounds['LowerWestColumnActual'] = HRV_NUM_COLUMNS - pdict['ncols'] = HRV_NUM_COLUMNS + bounds["UpperEastColumnActual"] = 1 + bounds["UpperWestColumnActual"] = HRV_NUM_COLUMNS + bounds["LowerEastColumnActual"] = 1 + bounds["LowerWestColumnActual"] = HRV_NUM_COLUMNS + pdict["ncols"] = HRV_NUM_COLUMNS upper_south_line = bounds[ - 'LowerNorthLineActual'] - current_first_line - 1 - upper_south_line = min(max(upper_south_line, 0), pdict['nlines']) - lower_coff = (5566 - bounds['LowerEastColumnActual'] + 1) - upper_coff = (5566 - bounds['UpperEastColumnActual'] + 1) + "LowerNorthLineActual"] - current_first_line - 1 + upper_south_line = min(max(upper_south_line, 0), pdict["nlines"]) + lower_coff = (5566 - bounds["LowerEastColumnActual"] + 1) + upper_coff = (5566 - bounds["UpperEastColumnActual"] + 1) # First we look at the lower window - pdict['nlines'] = upper_south_line - pdict['loff'] = loff - upper_south_line - pdict['coff'] = lower_coff - pdict['a_desc'] = area_naming['description'] + pdict["nlines"] = upper_south_line + pdict["loff"] = loff - upper_south_line + pdict["coff"] = lower_coff + pdict["a_desc"] = area_naming["description"] lower_area_extent = self._get_area_extent(pdict) lower_area = get_area_definition(pdict, lower_area_extent) # Now the upper window - pdict['nlines'] = nlines - upper_south_line - pdict['loff'] = loff - pdict['nlines'] - upper_south_line - pdict['coff'] = upper_coff - pdict['a_desc'] = area_naming['description'] + pdict["nlines"] = nlines - upper_south_line + pdict["loff"] = loff - pdict["nlines"] - upper_south_line + pdict["coff"] = upper_coff + pdict["a_desc"] = area_naming["description"] upper_area_extent = self._get_area_extent(pdict) upper_area = get_area_definition(pdict, upper_area_extent) @@ -674,13 +674,13 @@ def get_area_def(self, dsid): def get_dataset(self, key, info): """Get the dataset.""" res = super(HRITMSGFileHandler, self).get_dataset(key, info) - res = self.calibrate(res, key['calibration']) + res = self.calibrate(res, key["calibration"]) - is_calibration = key['calibration'] in ['radiance', 'reflectance', 'brightness_temperature'] + is_calibration = key["calibration"] in ["radiance", "reflectance", "brightness_temperature"] if is_calibration and self.mask_bad_quality_scan_lines: # noqa: E129 res = self._mask_bad_quality(res) - if key['name'] == 'HRV' and self.fill_hrv: + if key["name"] == "HRV" and self.fill_hrv: res = self.pad_hrv_data(res) self._update_attrs(res, info) self._add_scanline_acq_time(res) @@ -688,17 +688,17 @@ def get_dataset(self, key, info): def pad_hrv_data(self, res): """Add empty pixels around the HRV.""" - logger.debug('Padding HRV data to full disk') 
- nlines = int(self.mda['number_of_lines']) + logger.debug("Padding HRV data to full disk") + nlines = int(self.mda["number_of_lines"]) - segment_number = self.mda['segment_sequence_number'] + segment_number = self.mda["segment_sequence_number"] current_first_line = (segment_number - - self.mda['planned_start_segment_number']) * nlines - bounds = self.epilogue['ImageProductionStats']['ActualL15CoverageHRV'] + - self.mda["planned_start_segment_number"]) * nlines + bounds = self.epilogue["ImageProductionStats"]["ActualL15CoverageHRV"] upper_south_line = bounds[ - 'LowerNorthLineActual'] - current_first_line - 1 + "LowerNorthLineActual"] - current_first_line - 1 upper_south_line = min(max(upper_south_line, 0), nlines) data_list = list() @@ -706,18 +706,18 @@ def pad_hrv_data(self, res): # we have some of the lower window data_lower = pad_data_horizontally(res[:upper_south_line, :].data, (upper_south_line, HRV_NUM_COLUMNS), - bounds['LowerEastColumnActual'], - bounds['LowerWestColumnActual']) + bounds["LowerEastColumnActual"], + bounds["LowerWestColumnActual"]) data_list.append(data_lower) if upper_south_line < nlines: # we have some of the upper window data_upper = pad_data_horizontally(res[upper_south_line:, :].data, (nlines - upper_south_line, HRV_NUM_COLUMNS), - bounds['UpperEastColumnActual'], - bounds['UpperWestColumnActual']) + bounds["UpperEastColumnActual"], + bounds["UpperWestColumnActual"]) data_list.append(data_upper) - return xr.DataArray(da.vstack(data_list), dims=('y', 'x'), attrs=res.attrs.copy()) + return xr.DataArray(da.vstack(data_list), dims=("y", "x"), attrs=res.attrs.copy()) def calibrate(self, data, calibration): """Calibrate the data.""" @@ -733,9 +733,9 @@ def calibrate(self, data, calibration): def _mask_bad_quality(self, data): """Mask scanlines with bad quality.""" - line_validity = self.mda['image_segment_line_quality']['line_validity'] - line_radiometric_quality = self.mda['image_segment_line_quality']['line_radiometric_quality'] - line_geometric_quality = self.mda['image_segment_line_quality']['line_geometric_quality'] + line_validity = self.mda["image_segment_line_quality"]["line_validity"] + line_radiometric_quality = self.mda["image_segment_line_quality"]["line_radiometric_quality"] + line_geometric_quality = self.mda["image_segment_line_quality"]["line_geometric_quality"] data = mask_bad_quality(data, line_validity, line_geometric_quality, line_radiometric_quality) return data @@ -743,7 +743,7 @@ def _get_raw_mda(self): """Compile raw metadata to be included in the dataset attributes.""" # Metadata from segment header (excluding items which vary among the different segments) raw_mda = copy.deepcopy(self.mda) - for key in ('image_segment_line_quality', 'segment_sequence_number', 'annotation_header', 'loff'): + for key in ("image_segment_line_quality", "segment_sequence_number", "annotation_header", "loff"): raw_mda.pop(key, None) # Metadata from prologue and epilogue (large arrays removed) @@ -754,50 +754,50 @@ def _get_raw_mda(self): def _add_scanline_acq_time(self, dataset): """Add scanline acquisition time to the given dataset.""" - tline = self.mda['image_segment_line_quality']['line_mean_acquisition'] - acq_time = get_cds_time(days=tline['days'], msecs=tline['milliseconds']) + tline = self.mda["image_segment_line_quality"]["line_mean_acquisition"] + acq_time = get_cds_time(days=tline["days"], msecs=tline["milliseconds"]) add_scanline_acq_time(dataset, acq_time) def _update_attrs(self, res, info): """Update dataset attributes.""" - res.attrs['units'] = 
info['units'] - res.attrs['wavelength'] = info['wavelength'] - res.attrs['standard_name'] = info['standard_name'] - res.attrs['platform_name'] = self.platform_name - res.attrs['sensor'] = 'seviri' - res.attrs['nominal_start_time'] = self.nominal_start_time, - res.attrs['nominal_end_time'] = self.nominal_end_time, - res.attrs['time_parameters'] = { - 'nominal_start_time': self.nominal_start_time, - 'nominal_end_time': self.nominal_end_time, - 'observation_start_time': self.observation_start_time, - 'observation_end_time': self.observation_end_time, + res.attrs["units"] = info["units"] + res.attrs["wavelength"] = info["wavelength"] + res.attrs["standard_name"] = info["standard_name"] + res.attrs["platform_name"] = self.platform_name + res.attrs["sensor"] = "seviri" + res.attrs["nominal_start_time"] = self.nominal_start_time + res.attrs["nominal_end_time"] = self.nominal_end_time + res.attrs["time_parameters"] = { + "nominal_start_time": self.nominal_start_time, + "nominal_end_time": self.nominal_end_time, + "observation_start_time": self.observation_start_time, + "observation_end_time": self.observation_end_time, } - res.attrs['orbital_parameters'] = { - 'projection_longitude': self.mda['projection_parameters']['SSP_longitude'], - 'projection_latitude': self.mda['projection_parameters']['SSP_latitude'], - 'projection_altitude': self.mda['projection_parameters']['h']} - res.attrs['orbital_parameters'].update(self.mda['orbital_parameters']) - res.attrs['georef_offset_corrected'] = self.mda['offset_corrected'] + res.attrs["orbital_parameters"] = { + "projection_longitude": self.mda["projection_parameters"]["SSP_longitude"], + "projection_latitude": self.mda["projection_parameters"]["SSP_latitude"], + "projection_altitude": self.mda["projection_parameters"]["h"]} + res.attrs["orbital_parameters"].update(self.mda["orbital_parameters"]) + res.attrs["georef_offset_corrected"] = self.mda["offset_corrected"] if self.include_raw_metadata: - res.attrs['raw_metadata'] = self._get_raw_mda() + res.attrs["raw_metadata"] = self._get_raw_mda() def _get_calib_coefs(self, channel_name): """Get coefficients for calibration from counts to radiance.""" - band_idx = self.mda['spectral_channel_id'] - 1 + band_idx = self.mda["spectral_channel_id"] - 1 coefs_nominal = self.prologue["RadiometricProcessing"][ "Level15ImageCalibration"] - coefs_gsics = self.prologue["RadiometricProcessing"]['MPEFCalFeedback'] - radiance_types = self.prologue['ImageDescription'][ - 'Level15ImageProduction']['PlannedChanProcessing'] + coefs_gsics = self.prologue["RadiometricProcessing"]["MPEFCalFeedback"] + radiance_types = self.prologue["ImageDescription"][ + "Level15ImageProduction"]["PlannedChanProcessing"] return create_coef_dict( coefs_nominal=( - coefs_nominal['CalSlope'][band_idx], - coefs_nominal['CalOffset'][band_idx] + coefs_nominal["CalSlope"][band_idx], + coefs_nominal["CalOffset"][band_idx] ), coefs_gsics=( - coefs_gsics['GSICSCalCoeff'][band_idx], - coefs_gsics['GSICSOffsetCount'][band_idx] + coefs_gsics["GSICSCalCoeff"][band_idx], + coefs_gsics["GSICSOffsetCount"][band_idx] ), ext_coefs=self.ext_calib_coefs.get(channel_name, {}), radiance_type=radiance_types[band_idx] @@ -808,7 +808,7 @@ def pad_data(data, final_size, east_bound, west_bound): """Pad the data given east and west bounds and the desired size.""" nlines = final_size[0] if west_bound - east_bound != data.shape[1] - 1: - raise IndexError('East and west bounds do not match data shape') + raise IndexError("East and west bounds do not match data shape") 
padding_east = da.zeros((nlines, east_bound - 1), dtype=data.dtype, chunks=CHUNK_SIZE) padding_west = da.zeros((nlines, (final_size[1] - west_bound)), diff --git a/satpy/readers/seviri_l1b_icare.py b/satpy/readers/seviri_l1b_icare.py index 38c4360744..2024c46532 100644 --- a/satpy/readers/seviri_l1b_icare.py +++ b/satpy/readers/seviri_l1b_icare.py @@ -32,8 +32,8 @@ CHANN is the channel (i.e: HRV, IR016, WV073, etc) VX-XX is the processing version number -Example -------- +Example: +-------- Here is an example how to read the data in satpy: .. code-block:: python @@ -86,18 +86,18 @@ def __init__(self, filename, filename_info, filetype_info): filename_info, filetype_info) # These are VIS bands - self.ref_bands = ['HRV', 'VIS006', 'VIS008', 'IR_016'] + self.ref_bands = ["HRV", "VIS006", "VIS008", "IR_016"] # And these are IR bands - self.bt_bands = ['IR_039', 'IR_062', 'IR_073', - 'IR_087', 'IR_097', 'IR_108', - 'IR_120', 'IR_134', - 'WV_062', 'WV_073'] + self.bt_bands = ["IR_039", "IR_062", "IR_073", + "IR_087", "IR_097", "IR_108", + "IR_120", "IR_134", + "WV_062", "WV_073"] @property def sensor_name(self): """Get the sensor name.""" # the sensor and platform names are stored together, eg: MSG1/SEVIRI - attr = self['/attr/Sensors'] + attr = self["/attr/Sensors"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() else: @@ -105,14 +105,14 @@ def sensor_name(self): plat = attr[0:4] sens = attr[5:] # icare uses non-standard platform names - if plat == 'msg1': - plat = 'Meteosat-08' - elif plat == 'msg2': - plat = 'Meteosat-09' - elif plat == 'msg3': - plat = 'Meteosat-10' - elif plat == 'msg4': - plat = 'Meteosat-11' + if plat == "msg1": + plat = "Meteosat-08" + elif plat == "msg2": + plat = "Meteosat-09" + elif plat == "msg3": + plat = "Meteosat-10" + elif plat == "msg4": + plat = "Meteosat-11" else: raise NameError("Unsupported satellite platform:"+plat) return [plat, sens] @@ -120,7 +120,7 @@ def sensor_name(self): @property def satlon(self): """Get the satellite longitude.""" - attr = self['/attr/Sub_Satellite_Longitude'] + attr = self["/attr/Sub_Satellite_Longitude"] if isinstance(attr, np.ndarray): attr = float(attr.astype(str)) return attr @@ -128,7 +128,7 @@ def satlon(self): @property def projlon(self): """Get the projection longitude.""" - attr = self['/attr/Projection_Longitude'] + attr = self["/attr/Projection_Longitude"] if isinstance(attr, np.ndarray): attr = float(attr.astype(str)) return attr @@ -136,11 +136,11 @@ def projlon(self): @property def projection(self): """Get the projection.""" - attr = self['/attr/Geographic_Projection'] + attr = self["/attr/Geographic_Projection"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) attr = attr.lower() - if attr != 'geos': + if attr != "geos": raise NotImplementedError("Only the GEOS projection is supported.\ This is:", attr) return attr @@ -148,7 +148,7 @@ def projection(self): @property def zone(self): """Get the zone.""" - attr = self['/attr/Zone'] + attr = self["/attr/Zone"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() return attr @@ -156,7 +156,7 @@ def zone(self): @property def res(self): """Get the resolution.""" - attr = self['/attr/Nadir_Pixel_Size'] + attr = self["/attr/Nadir_Pixel_Size"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)).lower() return float(attr) @@ -164,7 +164,7 @@ def res(self): @property def end_time(self): """Get the end time.""" - attr = self['/attr/End_Acquisition_Date'] + attr = self["/attr/End_Acquisition_Date"] if isinstance(attr, 
np.ndarray): attr = str(attr.astype(str)) # In some versions milliseconds are present, sometimes not. @@ -177,7 +177,7 @@ def end_time(self): @property def start_time(self): """Get the start time.""" - attr = self['/attr/Beginning_Acquisition_Date'] + attr = self["/attr/Beginning_Acquisition_Date"] if isinstance(attr, np.ndarray): attr = str(attr.astype(str)) # In some versions milliseconds are present, sometimes not. @@ -190,7 +190,7 @@ def start_time(self): @property def alt(self): """Get the altitude.""" - attr = self['/attr/Altitude'] + attr = self["/attr/Altitude"] if isinstance(attr, np.ndarray): attr = attr.astype(str) attr = float(attr) @@ -201,7 +201,7 @@ def alt(self): @property def geoloc(self): """Get the geolocation.""" - attr = self['/attr/Geolocation'] + attr = self["/attr/Geolocation"] if isinstance(attr, np.ndarray): attr = attr.astype(str) cfac = float(attr[0]) @@ -217,32 +217,32 @@ def get_metadata(self, data, ds_info): mda.update(ds_info) geoloc = self.geoloc mda.update({ - 'start_time': self.start_time, - 'end_time': self.end_time, - 'platform_name': self.sensor_name[0], - 'sensor': self.sensor_name[1], - 'zone': self.zone, - 'projection_altitude': self.alt, - 'cfac': geoloc[0], - 'lfac': geoloc[1], - 'coff': geoloc[2], - 'loff': geoloc[3], - 'resolution': self.res, - 'satellite_actual_longitude': self.satlon, - 'projection_longitude': self.projlon, - 'projection_type': self.projection + "start_time": self.start_time, + "end_time": self.end_time, + "platform_name": self.sensor_name[0], + "sensor": self.sensor_name[1], + "zone": self.zone, + "projection_altitude": self.alt, + "cfac": geoloc[0], + "lfac": geoloc[1], + "coff": geoloc[2], + "loff": geoloc[3], + "resolution": self.res, + "satellite_actual_longitude": self.satlon, + "projection_longitude": self.projlon, + "projection_type": self.projection }) return mda def _get_dsname(self, ds_id): """Return the correct dataset name based on requested band.""" - if ds_id['name'] in self.ref_bands: - ds_get_name = 'Normalized_Radiance' - elif ds_id['name'] in self.bt_bands: - ds_get_name = 'Brightness_Temperature' + if ds_id["name"] in self.ref_bands: + ds_get_name = "Normalized_Radiance" + elif ds_id["name"] in self.bt_bands: + ds_get_name = "Brightness_Temperature" else: - raise NameError("Datset type "+ds_id['name']+" is not supported.") + raise NameError("Dataset type "+ds_id["name"]+" is not supported.") return ds_get_name def get_dataset(self, ds_id, ds_info): @@ -250,50 +250,50 @@ def get_dataset(self, ds_id, ds_info): ds_get_name = self._get_dsname(ds_id) data = self[ds_get_name] data.attrs = self.get_metadata(data, ds_info) - fill = data.attrs.pop('_FillValue') - offset = data.attrs.get('add_offset') - scale_factor = data.attrs.get('scale_factor') + fill = data.attrs.pop("_FillValue") + offset = data.attrs.get("add_offset") + scale_factor = data.attrs.get("scale_factor") data = data.where(data != fill) data = data.astype(np.float32) if scale_factor is not None and offset is not None: data = data * scale_factor data = data + offset # Now we correct range from 0-1 to 0-100 for VIS: - if ds_id['name'] in self.ref_bands: + if ds_id["name"] in self.ref_bands: data = data * 100. 
return data def get_area_def(self, ds_id): """Get the area def.""" ds_get_name = self._get_dsname(ds_id) - ds_shape = self[ds_get_name + '/shape'] + ds_shape = self[ds_get_name + "/shape"] geoloc = self.geoloc pdict = {} - pdict['cfac'] = np.int32(geoloc[0]) - pdict['lfac'] = np.int32(geoloc[1]) - pdict['coff'] = np.float32(geoloc[2]) - pdict['loff'] = -np.float32(geoloc[3]) + pdict["cfac"] = np.int32(geoloc[0]) + pdict["lfac"] = np.int32(geoloc[1]) + pdict["coff"] = np.float32(geoloc[2]) + pdict["loff"] = -np.float32(geoloc[3]) # Unfortunately this dataset does not store a, b or h. # We assume a and b here, and calculate h from altitude # a and b are from SEVIRI data HRIT header (201912101300) - pdict['a'] = 6378169 - pdict['b'] = 6356583.8 - pdict['h'] = self.alt - pdict['a'] - pdict['ssp_lon'] = self.projlon - pdict['ncols'] = int(ds_shape[0]) - pdict['nlines'] = int(ds_shape[1]) + pdict["a"] = 6378169 + pdict["b"] = 6356583.8 + pdict["h"] = self.alt - pdict["a"] + pdict["ssp_lon"] = self.projlon + pdict["ncols"] = int(ds_shape[0]) + pdict["nlines"] = int(ds_shape[1]) # Force scandir to SEVIRI default, not known from file - pdict['scandir'] = 'S2N' - pdict['a_name'] = 'geosmsg' - if ds_id['name'] == 'HRV': - pdict['a_desc'] = 'MSG/SEVIRI HRV channel area' - pdict['p_id'] = 'msg_hires' + pdict["scandir"] = "S2N" + pdict["a_name"] = "geosmsg" + if ds_id["name"] == "HRV": + pdict["a_desc"] = "MSG/SEVIRI HRV channel area" + pdict["p_id"] = "msg_hires" else: - pdict['a_desc'] = 'MSG/SEVIRI low resolution channel area' - pdict['p_id'] = 'msg_lowres' + pdict["a_desc"] = "MSG/SEVIRI low resolution channel area" + pdict["p_id"] = "msg_lowres" aex = get_area_extent(pdict) area = get_area_definition(pdict, aex) diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index cdad865f0c..361dd1bb50 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -37,8 +37,8 @@ To see the full list of arguments that can be provided, look into the documentation of :class:`NativeMSGFileHandler`. -Example -------- +Example: +-------- Here is an example how to read the data in satpy. 
NOTE: When loading the data, the orientation @@ -140,9 +140,9 @@ from satpy.readers.utils import reduce_mda from satpy.utils import get_legacy_chunk_size -logger = logging.getLogger('native_msg') +logger = logging.getLogger("native_msg") CHUNK_SIZE = get_legacy_chunk_size() -ASCII_STARTSWITH = b'FormatName : NATIVE' +ASCII_STARTSWITH = b"FormatName : NATIVE" class NativeMSGFileHandler(BaseFileHandler): @@ -170,7 +170,7 @@ class NativeMSGFileHandler(BaseFileHandler): """ def __init__(self, filename, filename_info, filetype_info, - calib_mode='nominal', fill_disk=False, ext_calib_coefs=None, + calib_mode="nominal", fill_disk=False, ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=100): """Initialize the reader.""" super(NativeMSGFileHandler, self).__init__(filename, @@ -199,33 +199,33 @@ def __init__(self, filename, filename_info, filetype_info, @property def _repeat_cycle_duration(self): """Get repeat cycle duration from the trailer.""" - if self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] == 1: + if self.trailer["15TRAILER"]["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] == 1: return 5 return REPEAT_CYCLE_DURATION @property def nominal_start_time(self): """Get the repeat cycle nominal start time from file header and round it to expected nominal time slot.""" - tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['TrueRepeatCycleStart'] + tm = self.header["15_DATA_HEADER"]["ImageAcquisition"]["PlannedAcquisitionTime"]["TrueRepeatCycleStart"] return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def nominal_end_time(self): """Get the repeat cycle nominal end time from file header and round it to expected nominal time slot.""" - tm = self.header['15_DATA_HEADER']['ImageAcquisition']['PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] + tm = self.header["15_DATA_HEADER"]["ImageAcquisition"]["PlannedAcquisitionTime"]["PlannedRepeatCycleEnd"] return round_nom_time(tm, time_delta=timedelta(minutes=self._repeat_cycle_duration)) @property def observation_start_time(self): """Get observation start time from trailer.""" - return self.trailer['15TRAILER']['ImageProductionStats'][ - 'ActualScanningSummary']['ForwardScanStart'] + return self.trailer["15TRAILER"]["ImageProductionStats"][ + "ActualScanningSummary"]["ForwardScanStart"] @property def observation_end_time(self): """Get observation end time from trailer.""" - return self.trailer['15TRAILER']['ImageProductionStats'][ - 'ActualScanningSummary']['ForwardScanEnd'] + return self.trailer["15TRAILER"]["ImageProductionStats"][ + "ActualScanningSummary"]["ForwardScanEnd"] @property def start_time(self): @@ -240,8 +240,8 @@ def end_time(self): def _get_data_dtype(self): """Get the dtype of the file based on the actual available channels.""" pkhrec = [ - ('GP_PK_HEADER', GSDTRecords.gp_pk_header), - ('GP_PK_SH1', GSDTRecords.gp_pk_sh1) + ("GP_PK_HEADER", GSDTRecords.gp_pk_header), + ("GP_PK_SH1", GSDTRecords.gp_pk_sh1) ] pk_head_dtype = np.dtype(pkhrec) @@ -264,14 +264,14 @@ def get_lrec(cols): # each pixel is 10-bits -> one line of data has 25% more bytes # than the number of columns suggest (10/8 = 1.25) - visir_rec = get_lrec(int(self.mda['number_of_columns'] * 1.25)) + visir_rec = get_lrec(int(self.mda["number_of_columns"] * 1.25)) number_of_visir_channels = len( - [s for s in self.mda['channel_list'] if not s == 'HRV']) - drec = [('visir', (visir_rec, number_of_visir_channels))] + [s for s in 
self.mda["channel_list"] if not s == "HRV"]) + drec = [("visir", (visir_rec, number_of_visir_channels))] - if self.mda['available_channels']['HRV']: - hrv_rec = get_lrec(int(self.mda['hrv_number_of_columns'] * 1.25)) - drec.append(('hrv', (hrv_rec, 3))) + if self.mda["available_channels"]["HRV"]: + hrv_rec = get_lrec(int(self.mda["hrv_number_of_columns"] * 1.25)) + drec.append(("hrv", (hrv_rec, 3))) return np.dtype(drec) @@ -282,51 +282,51 @@ def _get_memmap(self): hdr_size = self.header_type.itemsize return np.memmap(fp, dtype=data_dtype, - shape=(self.mda['number_of_lines'],), + shape=(self.mda["number_of_lines"],), offset=hdr_size, mode="r") def _read_header(self): """Read the header info.""" self.header.update(read_header(self.filename)) - if '15_SECONDARY_PRODUCT_HEADER' not in self.header: + if "15_SECONDARY_PRODUCT_HEADER" not in self.header: # No archive header, that means we have a complete file # including all channels. - self.header['15_SECONDARY_PRODUCT_HEADER'] = DEFAULT_15_SECONDARY_PRODUCT_HEADER + self.header["15_SECONDARY_PRODUCT_HEADER"] = DEFAULT_15_SECONDARY_PRODUCT_HEADER - data15hd = self.header['15_DATA_HEADER'] - sec15hd = self.header['15_SECONDARY_PRODUCT_HEADER'] + data15hd = self.header["15_DATA_HEADER"] + sec15hd = self.header["15_SECONDARY_PRODUCT_HEADER"] # Set the list of available channels: - self.mda['available_channels'] = get_available_channels(self.header) - self.mda['channel_list'] = [i for i in CHANNEL_NAMES.values() - if self.mda['available_channels'][i]] + self.mda["available_channels"] = get_available_channels(self.header) + self.mda["channel_list"] = [i for i in CHANNEL_NAMES.values() + if self.mda["available_channels"][i]] self.platform_id = data15hd[ - 'SatelliteStatus']['SatelliteDefinition']['SatelliteId'] - self.mda['platform_name'] = "Meteosat-" + SATNUM[self.platform_id] - self.mda['offset_corrected'] = data15hd['GeometricProcessing'][ - 'EarthModel']['TypeOfEarthModel'] == 2 + "SatelliteStatus"]["SatelliteDefinition"]["SatelliteId"] + self.mda["platform_name"] = "Meteosat-" + SATNUM[self.platform_id] + self.mda["offset_corrected"] = data15hd["GeometricProcessing"][ + "EarthModel"]["TypeOfEarthModel"] == 2 - equator_radius = data15hd['GeometricProcessing'][ - 'EarthModel']['EquatorialRadius'] * 1000. + equator_radius = data15hd["GeometricProcessing"][ + "EarthModel"]["EquatorialRadius"] * 1000. north_polar_radius = data15hd[ - 'GeometricProcessing']['EarthModel']['NorthPolarRadius'] * 1000. + "GeometricProcessing"]["EarthModel"]["NorthPolarRadius"] * 1000. south_polar_radius = data15hd[ - 'GeometricProcessing']['EarthModel']['SouthPolarRadius'] * 1000. + "GeometricProcessing"]["EarthModel"]["SouthPolarRadius"] * 1000. 
polar_radius = (north_polar_radius + south_polar_radius) * 0.5 - ssp_lon = data15hd['ImageDescription'][ - 'ProjectionDescription']['LongitudeOfSSP'] + ssp_lon = data15hd["ImageDescription"][ + "ProjectionDescription"]["LongitudeOfSSP"] - self.mda['projection_parameters'] = {'a': equator_radius, - 'b': polar_radius, - 'h': 35785831.00, - 'ssp_longitude': ssp_lon} + self.mda["projection_parameters"] = {"a": equator_radius, + "b": polar_radius, + "h": 35785831.00, + "ssp_longitude": ssp_lon} - north = int(sec15hd['NorthLineSelectedRectangle']['Value']) - east = int(sec15hd['EastColumnSelectedRectangle']['Value']) - south = int(sec15hd['SouthLineSelectedRectangle']['Value']) - west = int(sec15hd['WestColumnSelectedRectangle']['Value']) + north = int(sec15hd["NorthLineSelectedRectangle"]["Value"]) + east = int(sec15hd["EastColumnSelectedRectangle"]["Value"]) + south = int(sec15hd["SouthLineSelectedRectangle"]["Value"]) + west = int(sec15hd["WestColumnSelectedRectangle"]["Value"]) ncolumns = west - east + 1 nrows = north - south + 1 @@ -335,9 +335,9 @@ def _read_header(self): # the maximum, if so it is a rapid scanning service # or region of interest file if (nrows < VISIR_NUM_LINES) or (ncolumns < VISIR_NUM_COLUMNS): - self.mda['is_full_disk'] = False + self.mda["is_full_disk"] = False else: - self.mda['is_full_disk'] = True + self.mda["is_full_disk"] = True # If the number of columns in the file is not divisible by 4, # UMARF will add extra columns to the file @@ -349,7 +349,7 @@ def _read_header(self): # Check the VISIR calculated column dimension against # the header information - cols_visir_hdr = int(sec15hd['NumberColumnsVISIR']['Value']) + cols_visir_hdr = int(sec15hd["NumberColumnsVISIR"]["Value"]) if cols_visir_hdr != cols_visir: logger.warning( "Number of VISIR columns from the header is incorrect!") @@ -358,21 +358,21 @@ def _read_header(self): # HRV Channel - check if the area is reduced in east west # direction as this affects the number of columns in the file - cols_hrv_hdr = int(sec15hd['NumberColumnsHRV']['Value']) + cols_hrv_hdr = int(sec15hd["NumberColumnsHRV"]["Value"]) if ncolumns < VISIR_NUM_COLUMNS: cols_hrv = cols_hrv_hdr else: cols_hrv = int(cols_hrv_hdr / 2) # self.mda represents the 16bit dimensions not 10bit - self.mda['number_of_lines'] = int(sec15hd['NumberLinesVISIR']['Value']) - self.mda['number_of_columns'] = cols_visir - self.mda['hrv_number_of_lines'] = int(sec15hd["NumberLinesHRV"]['Value']) - self.mda['hrv_number_of_columns'] = cols_hrv + self.mda["number_of_lines"] = int(sec15hd["NumberLinesVISIR"]["Value"]) + self.mda["number_of_columns"] = cols_visir + self.mda["hrv_number_of_lines"] = int(sec15hd["NumberLinesHRV"]["Value"]) + self.mda["hrv_number_of_columns"] = cols_hrv - if '15_MAIN_PRODUCT_HEADER' not in self.header: + if "15_MAIN_PRODUCT_HEADER" not in self.header: logger.info("Quality flag check was not possible due to missing 15_MAIN_PRODUCT_HEADER.") - elif self.header['15_MAIN_PRODUCT_HEADER']['QQOV']['Value'] == 'NOK': + elif self.header["15_MAIN_PRODUCT_HEADER"]["QQOV"]["Value"] == "NOK": warnings.warn( "The quality flag for this file indicates not OK. 
" "Use this data with caution!", @@ -384,7 +384,7 @@ def _read_trailer(self): hdr_size = self.header_type.itemsize data_size = (self._get_data_dtype().itemsize * - self.mda['number_of_lines']) + self.mda["number_of_lines"]) with open(self.filename) as fp: fp.seek(hdr_size + data_size) @@ -417,27 +417,27 @@ def get_area_def(self, dataset_id): """ pdict = dict() - pdict['a'] = self.mda['projection_parameters']['a'] - pdict['b'] = self.mda['projection_parameters']['b'] - pdict['h'] = self.mda['projection_parameters']['h'] - pdict['ssp_lon'] = self.mda['projection_parameters']['ssp_longitude'] - - area_naming_input_dict = {'platform_name': 'msg', - 'instrument_name': 'seviri', - 'resolution': int(dataset_id['resolution']) + pdict["a"] = self.mda["projection_parameters"]["a"] + pdict["b"] = self.mda["projection_parameters"]["b"] + pdict["h"] = self.mda["projection_parameters"]["h"] + pdict["ssp_lon"] = self.mda["projection_parameters"]["ssp_longitude"] + + area_naming_input_dict = {"platform_name": "msg", + "instrument_name": "seviri", + "resolution": int(dataset_id["resolution"]) } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('seviri', pdict['ssp_lon'])}) + **get_service_mode("seviri", pdict["ssp_lon"])}) - pdict['a_name'] = area_naming['area_id'] - pdict['a_desc'] = area_naming['description'] - pdict['p_id'] = "" + pdict["a_name"] = area_naming["area_id"] + pdict["a_desc"] = area_naming["description"] + pdict["p_id"] = "" area_extent = self.get_area_extent(dataset_id) areas = list() - for aex, nlines, ncolumns in zip(area_extent['area_extent'], area_extent['nlines'], area_extent['ncolumns']): - pdict['nlines'] = nlines - pdict['ncols'] = ncolumns + for aex, nlines, ncolumns in zip(area_extent["area_extent"], area_extent["nlines"], area_extent["ncolumns"]): + pdict["nlines"] = nlines + pdict["ncols"] = ncolumns areas.append(get_area_definition(pdict, aex)) if len(areas) == 2: @@ -459,80 +459,80 @@ def get_area_extent(self, dataset_id): of the area extent is documented in a `developer's memo `_. 
""" - data15hd = self.header['15_DATA_HEADER'] + data15hd = self.header["15_DATA_HEADER"] # check for Earth model as this affects the north-south and # west-east offsets # section 3.1.4.2 of MSG Level 1.5 Image Data Format Description - earth_model = data15hd['GeometricProcessing']['EarthModel'][ - 'TypeOfEarthModel'] + earth_model = data15hd["GeometricProcessing"]["EarthModel"][ + "TypeOfEarthModel"] if earth_model == 2: ns_offset = 0 we_offset = 0 elif earth_model == 1: ns_offset = -0.5 we_offset = 0.5 - if dataset_id['name'] == 'HRV': + if dataset_id["name"] == "HRV": ns_offset = -1.5 we_offset = 1.5 else: raise NotImplementedError( - 'Unrecognised Earth model: {}'.format(earth_model) + "Unrecognised Earth model: {}".format(earth_model) ) - if dataset_id['name'] == 'HRV': - grid_origin = data15hd['ImageDescription']['ReferenceGridHRV']['GridOrigin'] + if dataset_id["name"] == "HRV": + grid_origin = data15hd["ImageDescription"]["ReferenceGridHRV"]["GridOrigin"] center_point = (HRV_NUM_COLUMNS / 2) - 2 - column_step = data15hd['ImageDescription']['ReferenceGridHRV']['ColumnDirGridStep'] * 1000.0 - line_step = data15hd['ImageDescription']['ReferenceGridHRV']['LineDirGridStep'] * 1000.0 + column_step = data15hd["ImageDescription"]["ReferenceGridHRV"]["ColumnDirGridStep"] * 1000.0 + line_step = data15hd["ImageDescription"]["ReferenceGridHRV"]["LineDirGridStep"] * 1000.0 nlines_fulldisk = HRV_NUM_LINES ncolumns_fulldisk = HRV_NUM_COLUMNS else: - grid_origin = data15hd['ImageDescription']['ReferenceGridVIS_IR']['GridOrigin'] + grid_origin = data15hd["ImageDescription"]["ReferenceGridVIS_IR"]["GridOrigin"] center_point = VISIR_NUM_COLUMNS / 2 - column_step = data15hd['ImageDescription']['ReferenceGridVIS_IR']['ColumnDirGridStep'] * 1000.0 - line_step = data15hd['ImageDescription']['ReferenceGridVIS_IR']['LineDirGridStep'] * 1000.0 + column_step = data15hd["ImageDescription"]["ReferenceGridVIS_IR"]["ColumnDirGridStep"] * 1000.0 + line_step = data15hd["ImageDescription"]["ReferenceGridVIS_IR"]["LineDirGridStep"] * 1000.0 nlines_fulldisk = VISIR_NUM_LINES ncolumns_fulldisk = VISIR_NUM_COLUMNS # Calculations assume grid origin is south-east corner # section 7.2.4 of MSG Level 1.5 Image Data Format Description - origins = {0: 'NW', 1: 'SW', 2: 'SE', 3: 'NE'} + origins = {0: "NW", 1: "SW", 2: "SE", 3: "NE"} if grid_origin != 2: - msg = 'Grid origin not supported number: {}, {} corner'.format( + msg = "Grid origin not supported number: {}, {} corner".format( grid_origin, origins[grid_origin] ) raise NotImplementedError(msg) - aex_data = {'area_extent': [], 'nlines': [], 'ncolumns': []} + aex_data = {"area_extent": [], "nlines": [], "ncolumns": []} img_bounds = self.image_boundaries.get_img_bounds(dataset_id, self.is_roi()) for south_bound, north_bound, east_bound, west_bound in zip(*img_bounds.values()): if self.fill_disk: east_bound, west_bound = 1, ncolumns_fulldisk - if not self.mda['is_full_disk']: + if not self.mda["is_full_disk"]: south_bound, north_bound = 1, nlines_fulldisk nlines = north_bound - south_bound + 1 ncolumns = west_bound - east_bound + 1 - area_dict = {'center_point': center_point, - 'east': east_bound, - 'west': west_bound, - 'south': south_bound, - 'north': north_bound, - 'column_step': column_step, - 'line_step': line_step, - 'column_offset': we_offset, - 'line_offset': ns_offset + area_dict = {"center_point": center_point, + "east": east_bound, + "west": west_bound, + "south": south_bound, + "north": north_bound, + "column_step": column_step, + "line_step": line_step, + 
"column_offset": we_offset, + "line_offset": ns_offset } aex = calculate_area_extent(area_dict) - aex_data['area_extent'].append(aex) - aex_data['nlines'].append(nlines) - aex_data['ncolumns'].append(ncolumns) + aex_data["area_extent"].append(aex) + aex_data["nlines"].append(nlines) + aex_data["ncolumns"].append(ncolumns) return aex_data @@ -543,28 +543,28 @@ def is_roi(self): of the SEVIRI disk. Hence, if the data does not cover the full disk, nor the standard RSS region in RSS mode, it's assumed to be ROI data. """ - is_rapid_scan = self.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] + is_rapid_scan = self.trailer["15TRAILER"]["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] # Standard RSS data is assumed to cover the three northmost segments, thus consisting of all 3712 columns and # the 1392 northmost lines - nlines = int(self.mda['number_of_lines']) - ncolumns = int(self.mda['number_of_columns']) - north_bound = int(self.header['15_SECONDARY_PRODUCT_HEADER']['NorthLineSelectedRectangle']['Value']) + nlines = int(self.mda["number_of_lines"]) + ncolumns = int(self.mda["number_of_columns"]) + north_bound = int(self.header["15_SECONDARY_PRODUCT_HEADER"]["NorthLineSelectedRectangle"]["Value"]) is_top3segments = (ncolumns == VISIR_NUM_COLUMNS and nlines == 1392 and north_bound == VISIR_NUM_LINES) - return not self.mda['is_full_disk'] and not (is_rapid_scan and is_top3segments) + return not self.mda["is_full_disk"] and not (is_rapid_scan and is_top3segments) def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" - if dataset_id['name'] not in self.mda['channel_list']: - raise KeyError('Channel % s not available in the file' % dataset_id['name']) - elif dataset_id['name'] not in ['HRV']: + if dataset_id["name"] not in self.mda["channel_list"]: + raise KeyError("Channel % s not available in the file" % dataset_id["name"]) + elif dataset_id["name"] not in ["HRV"]: data = self._get_visir_channel(dataset_id) else: data = self._get_hrv_channel() - xarr = xr.DataArray(data, dims=['y', 'x']).where(data != 0).astype(np.float32) + xarr = xr.DataArray(data, dims=["y", "x"]).where(data != 0).astype(np.float32) if xarr is None: return None @@ -573,34 +573,34 @@ def get_dataset(self, dataset_id, dataset_info): self._add_scanline_acq_time(dataset, dataset_id) self._update_attrs(dataset, dataset_info) - if self.fill_disk and not (dataset_id['name'] != 'HRV' and self.mda['is_full_disk']): + if self.fill_disk and not (dataset_id["name"] != "HRV" and self.mda["is_full_disk"]): padder = Padder(dataset_id, self.image_boundaries.get_img_bounds(dataset_id, self.is_roi()), - self.mda['is_full_disk']) + self.mda["is_full_disk"]) dataset = padder.pad_data(dataset) return dataset def _get_visir_channel(self, dataset_id): - shape = (self.mda['number_of_lines'], self.mda['number_of_columns']) + shape = (self.mda["number_of_lines"], self.mda["number_of_columns"]) # Check if there is only 1 channel in the list as a change # is needed in the array assignment ie channel id is not present - if len(self.mda['channel_list']) == 1: - raw = self.dask_array['visir']['line_data'] + if len(self.mda["channel_list"]) == 1: + raw = self.dask_array["visir"]["line_data"] else: - i = self.mda['channel_list'].index(dataset_id['name']) - raw = self.dask_array['visir']['line_data'][:, i, :] + i = self.mda["channel_list"].index(dataset_id["name"]) + raw = self.dask_array["visir"]["line_data"][:, i, :] data = dec10216(raw.flatten()) data = data.reshape(shape) return data 
def _get_hrv_channel(self): - shape = (self.mda['hrv_number_of_lines'], self.mda['hrv_number_of_columns']) - shape_layer = (self.mda['number_of_lines'], self.mda['hrv_number_of_columns']) + shape = (self.mda["hrv_number_of_lines"], self.mda["hrv_number_of_columns"]) + shape_layer = (self.mda["number_of_lines"], self.mda["hrv_number_of_columns"]) data_list = [] for i in range(3): - raw = self.dask_array['hrv']['line_data'][:, i, :] + raw = self.dask_array["hrv"]["line_data"][:, i, :] data = dec10216(raw.flatten()) data = data.reshape(shape_layer) data_list.append(data) @@ -610,7 +610,7 @@ def _get_hrv_channel(self): def calibrate(self, data, dataset_id): """Calibrate the data.""" tic = datetime.now() - channel_name = dataset_id['name'] + channel_name = dataset_id["name"] calib = SEVIRICalibrationHandler( platform_id=self.platform_id, channel_name=channel_name, @@ -618,7 +618,7 @@ def calibrate(self, data, dataset_id): calib_mode=self.calib_mode, scan_time=self.observation_start_time ) - res = calib.calibrate(data, dataset_id['calibration']) + res = calib.calibrate(data, dataset_id["calibration"]) logger.debug("Calibration time " + str(datetime.now() - tic)) return res @@ -629,20 +629,20 @@ def _get_calib_coefs(self, channel_name): # hence, this channel index needs to refer to full channel list band_idx = list(CHANNEL_NAMES.values()).index(channel_name) - coefs_nominal = self.header['15_DATA_HEADER'][ - 'RadiometricProcessing']['Level15ImageCalibration'] - coefs_gsics = self.header['15_DATA_HEADER'][ - 'RadiometricProcessing']['MPEFCalFeedback'] - radiance_types = self.header['15_DATA_HEADER']['ImageDescription'][ - 'Level15ImageProduction']['PlannedChanProcessing'] + coefs_nominal = self.header["15_DATA_HEADER"][ + "RadiometricProcessing"]["Level15ImageCalibration"] + coefs_gsics = self.header["15_DATA_HEADER"][ + "RadiometricProcessing"]["MPEFCalFeedback"] + radiance_types = self.header["15_DATA_HEADER"]["ImageDescription"][ + "Level15ImageProduction"]["PlannedChanProcessing"] return create_coef_dict( coefs_nominal=( - coefs_nominal['CalSlope'][band_idx], - coefs_nominal['CalOffset'][band_idx] + coefs_nominal["CalSlope"][band_idx], + coefs_nominal["CalOffset"][band_idx] ), coefs_gsics=( - coefs_gsics['GSICSCalCoeff'][band_idx], - coefs_gsics['GSICSOffsetCount'][band_idx] + coefs_gsics["GSICSCalCoeff"][band_idx], + coefs_gsics["GSICSOffsetCount"][band_idx] ), ext_coefs=self.ext_calib_coefs.get(channel_name, {}), radiance_type=radiance_types[band_idx] @@ -650,69 +650,69 @@ def _get_calib_coefs(self, channel_name): def _add_scanline_acq_time(self, dataset, dataset_id): """Add scanline acquisition time to the given dataset.""" - if dataset_id['name'] == 'HRV': + if dataset_id["name"] == "HRV": tline = self._get_acq_time_hrv() else: tline = self._get_acq_time_visir(dataset_id) - acq_time = get_cds_time(days=tline['Days'], msecs=tline['Milliseconds']) + acq_time = get_cds_time(days=tline["Days"], msecs=tline["Milliseconds"]) add_scanline_acq_time(dataset, acq_time) def _get_acq_time_hrv(self): """Get raw acquisition time for HRV channel.""" - tline = self.dask_array['hrv']['acq_time'] + tline = self.dask_array["hrv"]["acq_time"] tline0 = tline[:, 0] tline1 = tline[:, 1] tline2 = tline[:, 2] return da.stack((tline0, tline1, tline2), axis=1).reshape( - self.mda['hrv_number_of_lines']).compute() + self.mda["hrv_number_of_lines"]).compute() def _get_acq_time_visir(self, dataset_id): """Get raw acquisition time for VIS/IR channels.""" # Check if there is only 1 channel in the list as a change # is 
needed in the array assignment, i.e. channel id is not present - if len(self.mda['channel_list']) == 1: - return self.dask_array['visir']['acq_time'].compute() - i = self.mda['channel_list'].index(dataset_id['name']) - return self.dask_array['visir']['acq_time'][:, i].compute() + if len(self.mda["channel_list"]) == 1: + return self.dask_array["visir"]["acq_time"].compute() + i = self.mda["channel_list"].index(dataset_id["name"]) + return self.dask_array["visir"]["acq_time"][:, i].compute() def _update_attrs(self, dataset, dataset_info): """Update dataset attributes.""" - dataset.attrs['units'] = dataset_info['units'] - dataset.attrs['wavelength'] = dataset_info['wavelength'] - dataset.attrs['standard_name'] = dataset_info['standard_name'] - dataset.attrs['platform_name'] = self.mda['platform_name'] - dataset.attrs['sensor'] = 'seviri' - dataset.attrs['georef_offset_corrected'] = self.mda[ - 'offset_corrected'] - dataset.attrs['time_parameters'] = { - 'nominal_start_time': self.nominal_start_time, - 'nominal_end_time': self.nominal_end_time, - 'observation_start_time': self.observation_start_time, - 'observation_end_time': self.observation_end_time, + dataset.attrs["units"] = dataset_info["units"] + dataset.attrs["wavelength"] = dataset_info["wavelength"] + dataset.attrs["standard_name"] = dataset_info["standard_name"] + dataset.attrs["platform_name"] = self.mda["platform_name"] + dataset.attrs["sensor"] = "seviri" + dataset.attrs["georef_offset_corrected"] = self.mda[ + "offset_corrected"] + dataset.attrs["time_parameters"] = { + "nominal_start_time": self.nominal_start_time, + "nominal_end_time": self.nominal_end_time, + "observation_start_time": self.observation_start_time, + "observation_end_time": self.observation_end_time, } - dataset.attrs['orbital_parameters'] = self._get_orbital_parameters() + dataset.attrs["orbital_parameters"] = self._get_orbital_parameters() if self.include_raw_metadata: - dataset.attrs['raw_metadata'] = reduce_mda( + dataset.attrs["raw_metadata"] = reduce_mda( self.header, max_size=self.mda_max_array_size ) def _get_orbital_parameters(self): orbital_parameters = { - 'projection_longitude': self.mda['projection_parameters'][ - 'ssp_longitude'], - 'projection_latitude': 0., - 'projection_altitude': self.mda['projection_parameters']['h'], - 'satellite_nominal_longitude': self.header['15_DATA_HEADER'][ - 'SatelliteStatus']['SatelliteDefinition'][ - 'NominalLongitude'], - 'satellite_nominal_latitude': 0.0 + "projection_longitude": self.mda["projection_parameters"][ + "ssp_longitude"], + "projection_latitude": 0., + "projection_altitude": self.mda["projection_parameters"]["h"], + "satellite_nominal_longitude": self.header["15_DATA_HEADER"][ + "SatelliteStatus"]["SatelliteDefinition"][ + "NominalLongitude"], + "satellite_nominal_latitude": 0.0 } try: actual_lon, actual_lat, actual_alt = self.satpos orbital_parameters.update({ - 'satellite_actual_longitude': actual_lon, - 'satellite_actual_latitude': actual_lat, - 'satellite_actual_altitude': actual_alt + "satellite_actual_longitude": actual_lon, + "satellite_actual_latitude": actual_lat, + "satellite_actual_altitude": actual_alt }) except NoValidOrbitParams as err: logger.warning(err) @@ -726,14 +726,14 @@ def satpos(self): Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ - poly_finder = OrbitPolynomialFinder(self.header['15_DATA_HEADER'][ - 'SatelliteStatus']['Orbit']['OrbitPolynomial']) + poly_finder = OrbitPolynomialFinder(self.header["15_DATA_HEADER"][ + 
"SatelliteStatus"]["Orbit"]["OrbitPolynomial"]) orbit_polynomial = poly_finder.get_orbit_polynomial(self.start_time) return get_satpos( orbit_polynomial=orbit_polynomial, time=self.observation_start_time, - semi_major_axis=self.mda['projection_parameters']['a'], - semi_minor_axis=self.mda['projection_parameters']['b'] + semi_major_axis=self.mda["projection_parameters"]["a"], + semi_minor_axis=self.mda["projection_parameters"]["b"] ) @@ -749,13 +749,13 @@ def __init__(self, header, trailer, mda): def get_img_bounds(self, dataset_id, is_roi): """Get image line and column boundaries. - returns: + Returns: Dictionary with the four keys 'south_bound', 'north_bound', 'east_bound' and 'west_bound', each containing a list of the respective line/column numbers of the image boundaries. Lists (rather than scalars) are returned since the HRV data in FES mode contain data from two windows/areas. """ - if dataset_id['name'] == 'HRV' and not is_roi: + if dataset_id["name"] == "HRV" and not is_roi: img_bounds = self._get_hrv_actual_img_bounds() else: img_bounds = self._get_selected_img_bounds(dataset_id) @@ -766,28 +766,28 @@ def get_img_bounds(self, dataset_id, is_roi): def _get_hrv_actual_img_bounds(self): """Get HRV (if not ROI) image boundaries from the ActualL15CoverageHRV information stored in the trailer.""" - hrv_bounds = self._trailer['15TRAILER']['ImageProductionStats']['ActualL15CoverageHRV'] + hrv_bounds = self._trailer["15TRAILER"]["ImageProductionStats"]["ActualL15CoverageHRV"] - img_bounds = {'south_bound': [], 'north_bound': [], 'east_bound': [], 'west_bound': []} - for hrv_window in ['Lower', 'Upper']: - img_bounds['south_bound'].append(hrv_bounds['%sSouthLineActual' % hrv_window]) - img_bounds['north_bound'].append(hrv_bounds['%sNorthLineActual' % hrv_window]) - img_bounds['east_bound'].append(hrv_bounds['%sEastColumnActual' % hrv_window]) - img_bounds['west_bound'].append(hrv_bounds['%sWestColumnActual' % hrv_window]) + img_bounds = {"south_bound": [], "north_bound": [], "east_bound": [], "west_bound": []} + for hrv_window in ["Lower", "Upper"]: + img_bounds["south_bound"].append(hrv_bounds["%sSouthLineActual" % hrv_window]) + img_bounds["north_bound"].append(hrv_bounds["%sNorthLineActual" % hrv_window]) + img_bounds["east_bound"].append(hrv_bounds["%sEastColumnActual" % hrv_window]) + img_bounds["west_bound"].append(hrv_bounds["%sWestColumnActual" % hrv_window]) # Data from the upper hrv window are only available in FES mode - if not self._mda['is_full_disk']: + if not self._mda["is_full_disk"]: break return img_bounds def _get_selected_img_bounds(self, dataset_id): """Get VISIR and HRV (if ROI) image boundaries from the SelectedRectangle information stored in the header.""" - sec15hd = self._header['15_SECONDARY_PRODUCT_HEADER'] - south_bound = int(sec15hd['SouthLineSelectedRectangle']['Value']) - east_bound = int(sec15hd['EastColumnSelectedRectangle']['Value']) + sec15hd = self._header["15_SECONDARY_PRODUCT_HEADER"] + south_bound = int(sec15hd["SouthLineSelectedRectangle"]["Value"]) + east_bound = int(sec15hd["EastColumnSelectedRectangle"]["Value"]) - if dataset_id['name'] == 'HRV': + if dataset_id["name"] == "HRV": nlines, ncolumns = self._get_hrv_img_shape() south_bound = self._convert_visir_bound_to_hrv(south_bound) east_bound = self._convert_visir_bound_to_hrv(east_bound) @@ -797,19 +797,19 @@ def _get_selected_img_bounds(self, dataset_id): north_bound = south_bound + nlines - 1 west_bound = east_bound + ncolumns - 1 - img_bounds = {'south_bound': [south_bound], 'north_bound': 
[north_bound], - 'east_bound': [east_bound], 'west_bound': [west_bound]} + img_bounds = {"south_bound": [south_bound], "north_bound": [north_bound], + "east_bound": [east_bound], "west_bound": [west_bound]} return img_bounds def _get_hrv_img_shape(self): - nlines = int(self._mda['hrv_number_of_lines']) - ncolumns = int(self._mda['hrv_number_of_columns']) + nlines = int(self._mda["hrv_number_of_lines"]) + ncolumns = int(self._mda["hrv_number_of_columns"]) return nlines, ncolumns def _get_visir_img_shape(self): - nlines = int(self._mda['number_of_lines']) - ncolumns = int(self._mda['number_of_columns']) + nlines = int(self._mda["number_of_lines"]) + ncolumns = int(self._mda["number_of_columns"]) return nlines, ncolumns @staticmethod @@ -824,7 +824,7 @@ def _check_for_valid_bounds(img_bounds): no_empty = (min(len_img_bounds) > 0) if not (same_lengths and no_empty): - raise ValueError('Invalid image boundaries') + raise ValueError("Invalid image boundaries") class Padder: @@ -835,14 +835,14 @@ def __init__(self, dataset_id, img_bounds, is_full_disk): self._img_bounds = img_bounds self._is_full_disk = is_full_disk - if dataset_id['name'] == 'HRV': + if dataset_id["name"] == "HRV": self._final_shape = (HRV_NUM_LINES, HRV_NUM_COLUMNS) else: self._final_shape = (VISIR_NUM_LINES, VISIR_NUM_COLUMNS) def pad_data(self, dataset): """Pad data to full disk with empty pixels.""" - logger.debug('Padding data to full disk') + logger.debug("Padding data to full disk") data_list = [] for south_bound, north_bound, east_bound, west_bound in zip(*self._img_bounds.values()): @@ -857,7 +857,7 @@ def pad_data(self, dataset): if not self._is_full_disk: padded_data = pad_data_vertically(padded_data, self._final_shape, south_bound, north_bound) - return xr.DataArray(padded_data, dims=('y', 'x'), attrs=dataset.attrs.copy()) + return xr.DataArray(padded_data, dims=("y", "x"), attrs=dataset.attrs.copy()) def _extract_data_to_pad(self, dataset, south_bound, north_bound): """Extract the data that shall be padded. 
@@ -875,19 +875,19 @@ def _extract_data_to_pad(self, dataset, south_bound, north_bound): def get_available_channels(header): """Get the available channels from the header information.""" - channels_str = header['15_SECONDARY_PRODUCT_HEADER'][ - 'SelectedBandIDs']['Value'] + channels_str = header["15_SECONDARY_PRODUCT_HEADER"][ + "SelectedBandIDs"]["Value"] available_channels = {} for idx, char in zip(range(12), channels_str): - available_channels[CHANNEL_NAMES[idx + 1]] = (char == 'X') + available_channels[CHANNEL_NAMES[idx + 1]] = (char == "X") return available_channels def has_archive_header(filename): """Check whether the file includes an ASCII archive header.""" - with open(filename, mode='rb') as istream: + with open(filename, mode="rb") as istream: return istream.read(36) == ASCII_STARTSWITH diff --git a/satpy/readers/seviri_l1b_native_hdr.py b/satpy/readers/seviri_l1b_native_hdr.py index 8c0212a6f2..56c5c0c3c9 100644 --- a/satpy/readers/seviri_l1b_native_hdr.py +++ b/satpy/readers/seviri_l1b_native_hdr.py @@ -39,37 +39,37 @@ class GSDTRecords(object): # 4 bytes gp_cpu_address = [ - ('Qualifier_1', np.uint8), - ('Qualifier_2', np.uint8), - ('Qualifier_3', np.uint8), - ('Qualifier_4', np.uint8) + ("Qualifier_1", np.uint8), + ("Qualifier_2", np.uint8), + ("Qualifier_3", np.uint8), + ("Qualifier_4", np.uint8) ] # 22 bytes gp_pk_header = [ - ('HeaderVersionNo', np.uint8), - ('PacketType', np.uint8), - ('SubHeaderType', np.uint8), - ('SourceFacilityId', gp_fac_id), - ('SourceEnvId', gp_fac_env), - ('SourceInstanceId', np.uint8), - ('SourceSUId', gp_su_id), - ('SourceCPUId', gp_cpu_address), - ('DestFacilityId', gp_fac_id), - ('DestEnvId', gp_fac_env), - ('SequenceCount', np.uint16), - ('PacketLength', np.int32) + ("HeaderVersionNo", np.uint8), + ("PacketType", np.uint8), + ("SubHeaderType", np.uint8), + ("SourceFacilityId", gp_fac_id), + ("SourceEnvId", gp_fac_env), + ("SourceInstanceId", np.uint8), + ("SourceSUId", gp_su_id), + ("SourceCPUId", gp_cpu_address), + ("DestFacilityId", gp_fac_id), + ("DestEnvId", gp_fac_env), + ("SequenceCount", np.uint16), + ("PacketLength", np.int32) ] # 16 bytes gp_pk_sh1 = [ - ('SubHeaderVersionNo', np.uint8), - ('ChecksumFlag', bool), - ('Acknowledgement', (np.uint8, 4)), - ('ServiceType', gp_svce_type), - ('ServiceSubtype', np.uint8), - ('PacketTime', time_cds_short), - ('SpacecraftId', gp_sc_id) + ("SubHeaderVersionNo", np.uint8), + ("ChecksumFlag", bool), + ("Acknowledgement", (np.uint8, 4)), + ("ServiceType", gp_svce_type), + ("ServiceSubtype", np.uint8), + ("PacketTime", time_cds_short), + ("SpacecraftId", gp_sc_id) ] @@ -83,17 +83,17 @@ def get(self, with_archive_header): record = [] if with_archive_header: record += [ - ('15_MAIN_PRODUCT_HEADER', L15MainProductHeaderRecord().get()), - ('15_SECONDARY_PRODUCT_HEADER', + ("15_MAIN_PRODUCT_HEADER", L15MainProductHeaderRecord().get()), + ("15_SECONDARY_PRODUCT_HEADER", L15SecondaryProductHeaderRecord().get()), ] record += [ - ('GP_PK_HEADER', GSDTRecords.gp_pk_header), - ('GP_PK_SH1', GSDTRecords.gp_pk_sh1), - ('15_DATA_HEADER', L15DataHeaderRecord().get()) + ("GP_PK_HEADER", GSDTRecords.gp_pk_header), + ("GP_PK_SH1", GSDTRecords.gp_pk_sh1), + ("15_DATA_HEADER", L15DataHeaderRecord().get()) ] - return np.dtype(record).newbyteorder('>') + return np.dtype(record).newbyteorder(">") class L15PhData(object): @@ -101,8 +101,8 @@ class L15PhData(object): # 80 bytes l15_ph_data = [ - ('Name', 'S30'), - ('Value', 'S50') + ("Name", "S30"), + ("Value", "S50") ] @@ -118,39 +118,39 @@ def get(self): l15_ph_data = 
L15PhData.l15_ph_data l15_ph_data_identification = [ - ('Name', 'S30'), - ('Size', 'S16'), - ('Address', 'S16') + ("Name", "S30"), + ("Size", "S16"), + ("Address", "S16") ] # 3674 bytes record = [ - ('FormatName', l15_ph_data), - ('FormatDocumentName', l15_ph_data), - ('FormatDocumentMajorVersion', l15_ph_data), - ('FormatDocumentMinorVersion', l15_ph_data), - ('CreationDateTime', l15_ph_data), - ('CreatingCentre', l15_ph_data), - ('DataSetIdentification', (l15_ph_data_identification, 27)), - ('TotalFileSize', l15_ph_data), - ('GORT', l15_ph_data), - ('ASTI', l15_ph_data), - ('LLOS', l15_ph_data), - ('SNIT', l15_ph_data), - ('AIID', l15_ph_data), - ('SSBT', l15_ph_data), - ('SSST', l15_ph_data), - ('RRCC', l15_ph_data), - ('RRBT', l15_ph_data), - ('RRST', l15_ph_data), - ('PPRC', l15_ph_data), - ('PPDT', l15_ph_data), - ('GPLV', l15_ph_data), - ('APNM', l15_ph_data), - ('AARF', l15_ph_data), - ('UUDT', l15_ph_data), - ('QQOV', l15_ph_data), - ('UDSP', l15_ph_data) + ("FormatName", l15_ph_data), + ("FormatDocumentName", l15_ph_data), + ("FormatDocumentMajorVersion", l15_ph_data), + ("FormatDocumentMinorVersion", l15_ph_data), + ("CreationDateTime", l15_ph_data), + ("CreatingCentre", l15_ph_data), + ("DataSetIdentification", (l15_ph_data_identification, 27)), + ("TotalFileSize", l15_ph_data), + ("GORT", l15_ph_data), + ("ASTI", l15_ph_data), + ("LLOS", l15_ph_data), + ("SNIT", l15_ph_data), + ("AIID", l15_ph_data), + ("SSBT", l15_ph_data), + ("SSST", l15_ph_data), + ("RRCC", l15_ph_data), + ("RRBT", l15_ph_data), + ("RRST", l15_ph_data), + ("PPRC", l15_ph_data), + ("PPDT", l15_ph_data), + ("GPLV", l15_ph_data), + ("APNM", l15_ph_data), + ("AARF", l15_ph_data), + ("UUDT", l15_ph_data), + ("QQOV", l15_ph_data), + ("UDSP", l15_ph_data) ] return record @@ -169,24 +169,24 @@ def get(self): # 1440 bytes record = [ - ('ABID', l15_ph_data), - ('SMOD', l15_ph_data), - ('APXS', l15_ph_data), - ('AVPA', l15_ph_data), - ('LSCD', l15_ph_data), - ('LMAP', l15_ph_data), - ('QDLC', l15_ph_data), - ('QDLP', l15_ph_data), - ('QQAI', l15_ph_data), - ('SelectedBandIDs', l15_ph_data), - ('SouthLineSelectedRectangle', l15_ph_data), - ('NorthLineSelectedRectangle', l15_ph_data), - ('EastColumnSelectedRectangle', l15_ph_data), - ('WestColumnSelectedRectangle', l15_ph_data), - ('NumberLinesVISIR', l15_ph_data), - ('NumberColumnsVISIR', l15_ph_data), - ('NumberLinesHRV', l15_ph_data), - ('NumberColumnsHRV', l15_ph_data) + ("ABID", l15_ph_data), + ("SMOD", l15_ph_data), + ("APXS", l15_ph_data), + ("AVPA", l15_ph_data), + ("LSCD", l15_ph_data), + ("LMAP", l15_ph_data), + ("QDLC", l15_ph_data), + ("QDLP", l15_ph_data), + ("QQAI", l15_ph_data), + ("SelectedBandIDs", l15_ph_data), + ("SouthLineSelectedRectangle", l15_ph_data), + ("NorthLineSelectedRectangle", l15_ph_data), + ("EastColumnSelectedRectangle", l15_ph_data), + ("WestColumnSelectedRectangle", l15_ph_data), + ("NumberLinesVISIR", l15_ph_data), + ("NumberColumnsVISIR", l15_ph_data), + ("NumberLinesHRV", l15_ph_data), + ("NumberColumnsHRV", l15_ph_data) ] return record @@ -203,14 +203,14 @@ def get(self): """Get header record data.""" # 445248 bytes record = [ - ('15HeaderVersion', np.uint8), - ('SatelliteStatus', self.satellite_status), - ('ImageAcquisition', self.image_acquisition), - ('CelestialEvents', self.celestial_events), - ('ImageDescription', self.image_description), - ('RadiometricProcessing', self.radiometric_processing), - ('GeometricProcessing', self.geometric_processing), - ('IMPFConfiguration', self.impf_configuration)] + ("15HeaderVersion", 
np.uint8), + ("SatelliteStatus", self.satellite_status), + ("ImageAcquisition", self.image_acquisition), + ("CelestialEvents", self.celestial_events), + ("ImageDescription", self.image_description), + ("RadiometricProcessing", self.radiometric_processing), + ("GeometricProcessing", self.geometric_processing), + ("IMPFConfiguration", self.impf_configuration)] return record @@ -219,72 +219,72 @@ def satellite_status(self): """Get satellite status data.""" # 7 bytes satellite_definition = [ - ('SatelliteId', np.uint16), - ('NominalLongitude', np.float32), - ('SatelliteStatus', np.uint8)] + ("SatelliteId", np.uint16), + ("NominalLongitude", np.float32), + ("SatelliteStatus", np.uint8)] # 28 bytes satellite_operations = [ - ('LastManoeuvreFlag', bool), - ('LastManoeuvreStartTime', time_cds_short), - ('LastManoeuvreEndTime', time_cds_short), - ('LastManoeuvreType', np.uint8), - ('NextManoeuvreFlag', bool), - ('NextManoeuvreStartTime', time_cds_short), - ('NextManoeuvreEndTime', time_cds_short), - ('NextManoeuvreType', np.uint8)] + ("LastManoeuvreFlag", bool), + ("LastManoeuvreStartTime", time_cds_short), + ("LastManoeuvreEndTime", time_cds_short), + ("LastManoeuvreType", np.uint8), + ("NextManoeuvreFlag", bool), + ("NextManoeuvreStartTime", time_cds_short), + ("NextManoeuvreEndTime", time_cds_short), + ("NextManoeuvreType", np.uint8)] # 396 bytes orbit_coeff = [ - ('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('X', (np.float64, 8)), - ('Y', (np.float64, 8)), - ('Z', (np.float64, 8)), - ('VX', (np.float64, 8)), - ('VY', (np.float64, 8)), - ('VZ', (np.float64, 8))] + ("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("X", (np.float64, 8)), + ("Y", (np.float64, 8)), + ("Z", (np.float64, 8)), + ("VX", (np.float64, 8)), + ("VY", (np.float64, 8)), + ("VZ", (np.float64, 8))] # 39612 bytes orbit = [ - ('PeriodStartTime', time_cds_short), - ('PeriodEndTime', time_cds_short), - ('OrbitPolynomial', (orbit_coeff, 100))] + ("PeriodStartTime", time_cds_short), + ("PeriodEndTime", time_cds_short), + ("OrbitPolynomial", (orbit_coeff, 100))] # 204 bytes attitude_coeff = [ - ('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('XofSpinAxis', (np.float64, 8)), - ('YofSpinAxis', (np.float64, 8)), - ('ZofSpinAxis', (np.float64, 8))] + ("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("XofSpinAxis", (np.float64, 8)), + ("YofSpinAxis", (np.float64, 8)), + ("ZofSpinAxis", (np.float64, 8))] # 20420 bytes attitude = [ - ('PeriodStartTime', time_cds_short), - ('PeriodEndTime', time_cds_short), - ('PrincipleAxisOffsetAngle', np.float64), - ('AttitudePolynomial', (attitude_coeff, 100))] + ("PeriodStartTime", time_cds_short), + ("PeriodEndTime", time_cds_short), + ("PrincipleAxisOffsetAngle", np.float64), + ("AttitudePolynomial", (attitude_coeff, 100))] # 59 bytes utc_correlation = [ - ('PeriodStartTime', time_cds_short), - ('PeriodEndTime', time_cds_short), - ('OnBoardTimeStart', (np.uint8, 7)), - ('VarOnBoardTimeStart', np.float64), - ('A1', np.float64), - ('VarA1', np.float64), - ('A2', np.float64), - ('VarA2', np.float64)] + ("PeriodStartTime", time_cds_short), + ("PeriodEndTime", time_cds_short), + ("OnBoardTimeStart", (np.uint8, 7)), + ("VarOnBoardTimeStart", np.float64), + ("A1", np.float64), + ("VarA1", np.float64), + ("A2", np.float64), + ("VarA2", np.float64)] # 60134 bytes record = [ - ('SatelliteDefinition', satellite_definition), - ('SatelliteOperations', satellite_operations), - ('Orbit', orbit), - ('Attitude', attitude), - ('SpinRetreatRCStart', 
np.float64), - ('UTCCorrelation', utc_correlation)] + ("SatelliteDefinition", satellite_definition), + ("SatelliteOperations", satellite_operations), + ("Orbit", orbit), + ("Attitude", attitude), + ("SpinRetreatRCStart", np.float64), + ("UTCCorrelation", utc_correlation)] return record @@ -292,72 +292,72 @@ def satellite_status(self): def image_acquisition(self): """Get image acquisition data.""" planned_acquisition_time = [ - ('TrueRepeatCycleStart', time_cds_expanded), - ('PlanForwardScanEnd', time_cds_expanded), - ('PlannedRepeatCycleEnd', time_cds_expanded)] + ("TrueRepeatCycleStart", time_cds_expanded), + ("PlanForwardScanEnd", time_cds_expanded), + ("PlannedRepeatCycleEnd", time_cds_expanded)] radiometer_status = [ - ('ChannelStatus', (np.uint8, 12)), - ('DetectorStatus', (np.uint8, 42))] + ("ChannelStatus", (np.uint8, 12)), + ("DetectorStatus", (np.uint8, 42))] hrv_frame_offsets = [ - ('MDUNomHRVDelay1', np.uint16), - ('MDUNomHRVDelay2', np.uint16), - ('Spare', np.uint16), - ('MDUNomHRVBreakLine', np.uint16)] + ("MDUNomHRVDelay1", np.uint16), + ("MDUNomHRVDelay2", np.uint16), + ("Spare", np.uint16), + ("MDUNomHRVBreakLine", np.uint16)] operation_parameters = [ - ('L0_LineCounter', np.uint16), - ('K1_RetraceLines', np.uint16), - ('K2_PauseDeciseconds', np.uint16), - ('K3_RetraceLines', np.uint16), - ('K4_PauseDeciseconds', np.uint16), - ('K5_RetraceLines', np.uint16), - ('XDeepSpaceWindowPosition', np.uint8)] + ("L0_LineCounter", np.uint16), + ("K1_RetraceLines", np.uint16), + ("K2_PauseDeciseconds", np.uint16), + ("K3_RetraceLines", np.uint16), + ("K4_PauseDeciseconds", np.uint16), + ("K5_RetraceLines", np.uint16), + ("XDeepSpaceWindowPosition", np.uint8)] radiometer_settings = [ - ('MDUSamplingDelays', (np.uint16, 42)), - ('HRVFrameOffsets', hrv_frame_offsets), - ('DHSSSynchSelection', np.uint8), - ('MDUOutGain', (np.uint16, 42)), - ('MDUCoarseGain', (np.uint8, 42)), - ('MDUFineGain', (np.uint16, 42)), - ('MDUNumericalOffset', (np.uint16, 42)), - ('PUGain', (np.uint16, 42)), - ('PUOffset', (np.uint16, 27)), - ('PUBias', (np.uint16, 15)), - ('OperationParameters', operation_parameters), - ('RefocusingLines', np.uint16), - ('RefocusingDirection', np.uint8), - ('RefocusingPosition', np.uint16), - ('ScanRefPosFlag', bool), - ('ScanRefPosNumber', np.uint16), - ('ScanRefPosVal', np.float32), - ('ScanFirstLine', np.uint16), - ('ScanLastLine', np.uint16), - ('RetraceStartLine', np.uint16)] + ("MDUSamplingDelays", (np.uint16, 42)), + ("HRVFrameOffsets", hrv_frame_offsets), + ("DHSSSynchSelection", np.uint8), + ("MDUOutGain", (np.uint16, 42)), + ("MDUCoarseGain", (np.uint8, 42)), + ("MDUFineGain", (np.uint16, 42)), + ("MDUNumericalOffset", (np.uint16, 42)), + ("PUGain", (np.uint16, 42)), + ("PUOffset", (np.uint16, 27)), + ("PUBias", (np.uint16, 15)), + ("OperationParameters", operation_parameters), + ("RefocusingLines", np.uint16), + ("RefocusingDirection", np.uint8), + ("RefocusingPosition", np.uint16), + ("ScanRefPosFlag", bool), + ("ScanRefPosNumber", np.uint16), + ("ScanRefPosVal", np.float32), + ("ScanFirstLine", np.uint16), + ("ScanLastLine", np.uint16), + ("RetraceStartLine", np.uint16)] decontamination = [ - ('DecontaminationNow', bool), - ('DecontaminationStart', time_cds_short), - ('DecontaminationEnd', time_cds_short)] + ("DecontaminationNow", bool), + ("DecontaminationStart", time_cds_short), + ("DecontaminationEnd", time_cds_short)] radiometer_operations = [ - ('LastGainChangeFlag', bool), - ('LastGainChangeTime', time_cds_short), - ('Decontamination', decontamination), - 
('BBCalScheduled', bool), - ('BBCalibrationType', np.uint8), - ('BBFirstLine', np.uint16), - ('BBLastLine', np.uint16), - ('ColdFocalPlaneOpTemp', np.uint16), - ('WarmFocalPlaneOpTemp', np.uint16)] + ("LastGainChangeFlag", bool), + ("LastGainChangeTime", time_cds_short), + ("Decontamination", decontamination), + ("BBCalScheduled", bool), + ("BBCalibrationType", np.uint8), + ("BBFirstLine", np.uint16), + ("BBLastLine", np.uint16), + ("ColdFocalPlaneOpTemp", np.uint16), + ("WarmFocalPlaneOpTemp", np.uint16)] record = [ - ('PlannedAcquisitionTime', planned_acquisition_time), - ('RadiometerStatus', radiometer_status), - ('RadiometerSettings', radiometer_settings), - ('RadiometerOperations', radiometer_operations)] + ("PlannedAcquisitionTime", planned_acquisition_time), + ("RadiometerStatus", radiometer_status), + ("RadiometerSettings", radiometer_settings), + ("RadiometerOperations", radiometer_operations)] return record @@ -365,39 +365,39 @@ def image_acquisition(self): def celestial_events(self): """Get celestial events data.""" earth_moon_sun_coeff = [ - ('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('AlphaCoef', (np.float64, 8)), - ('BetaCoef', (np.float64, 8))] + ("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("AlphaCoef", (np.float64, 8)), + ("BetaCoef", (np.float64, 8))] star_coeff = [ - ('StarId', np.uint16), - ('StartTime', time_cds_short), - ('EndTime', time_cds_short), - ('AlphaCoef', (np.float64, 8)), - ('BetaCoef', (np.float64, 8))] + ("StarId", np.uint16), + ("StartTime", time_cds_short), + ("EndTime", time_cds_short), + ("AlphaCoef", (np.float64, 8)), + ("BetaCoef", (np.float64, 8))] ephemeris = [ - ('PeriodTimeStart', time_cds_short), - ('PeriodTimeEnd', time_cds_short), - ('RelatedOrbitFileTime', 'S15'), - ('RelatedAttitudeFileTime', 'S15'), - ('EarthEphemeris', (earth_moon_sun_coeff, 100)), - ('MoonEphemeris', (earth_moon_sun_coeff, 100)), - ('SunEphemeris', (earth_moon_sun_coeff, 100)), - ('StarEphemeris', (star_coeff, (20, 100)))] + ("PeriodTimeStart", time_cds_short), + ("PeriodTimeEnd", time_cds_short), + ("RelatedOrbitFileTime", "S15"), + ("RelatedAttitudeFileTime", "S15"), + ("EarthEphemeris", (earth_moon_sun_coeff, 100)), + ("MoonEphemeris", (earth_moon_sun_coeff, 100)), + ("SunEphemeris", (earth_moon_sun_coeff, 100)), + ("StarEphemeris", (star_coeff, (20, 100)))] relation_to_image = [ - ('TypeOfEclipse', np.uint8), - ('EclipseStartTime', time_cds_short), - ('EclipseEndTime', time_cds_short), - ('VisibleBodiesInImage', np.uint8), - ('BodiesCloseToFOV', np.uint8), - ('ImpactOnImageQuality', np.uint8)] + ("TypeOfEclipse", np.uint8), + ("EclipseStartTime", time_cds_short), + ("EclipseEndTime", time_cds_short), + ("VisibleBodiesInImage", np.uint8), + ("BodiesCloseToFOV", np.uint8), + ("ImpactOnImageQuality", np.uint8)] record = [ - ('CelestialBodiesPosition', ephemeris), - ('RelationToImage', relation_to_image)] + ("CelestialBodiesPosition", ephemeris), + ("RelationToImage", relation_to_image)] return record @@ -405,44 +405,44 @@ def celestial_events(self): def image_description(self): """Get image description data.""" projection_description = [ - ('TypeOfProjection', np.uint8), - ('LongitudeOfSSP', np.float32)] + ("TypeOfProjection", np.uint8), + ("LongitudeOfSSP", np.float32)] reference_grid = [ - ('NumberOfLines', np.int32), - ('NumberOfColumns', np.int32), - ('LineDirGridStep', np.float32), - ('ColumnDirGridStep', np.float32), - ('GridOrigin', np.uint8)] + ("NumberOfLines", np.int32), + ("NumberOfColumns", np.int32), + 
("LineDirGridStep", np.float32), + ("ColumnDirGridStep", np.float32), + ("GridOrigin", np.uint8)] planned_coverage_vis_ir = [ - ('SouthernLinePlanned', np.int32), - ('NorthernLinePlanned', np.int32), - ('EasternColumnPlanned', np.int32), - ('WesternColumnPlanned', np.int32)] + ("SouthernLinePlanned", np.int32), + ("NorthernLinePlanned", np.int32), + ("EasternColumnPlanned", np.int32), + ("WesternColumnPlanned", np.int32)] planned_coverage_hrv = [ - ('LowerSouthLinePlanned', np.int32), - ('LowerNorthLinePlanned', np.int32), - ('LowerEastColumnPlanned', np.int32), - ('LowerWestColumnPlanned', np.int32), - ('UpperSouthLinePlanned', np.int32), - ('UpperNorthLinePlanned', np.int32), - ('UpperEastColumnPlanned', np.int32), - ('UpperWestColumnPlanned', np.int32)] + ("LowerSouthLinePlanned", np.int32), + ("LowerNorthLinePlanned", np.int32), + ("LowerEastColumnPlanned", np.int32), + ("LowerWestColumnPlanned", np.int32), + ("UpperSouthLinePlanned", np.int32), + ("UpperNorthLinePlanned", np.int32), + ("UpperEastColumnPlanned", np.int32), + ("UpperWestColumnPlanned", np.int32)] level_15_image_production = [ - ('ImageProcDirection', np.uint8), - ('PixelGenDirection', np.uint8), - ('PlannedChanProcessing', (np.uint8, 12))] + ("ImageProcDirection", np.uint8), + ("PixelGenDirection", np.uint8), + ("PlannedChanProcessing", (np.uint8, 12))] record = [ - ('ProjectionDescription', projection_description), - ('ReferenceGridVIS_IR', reference_grid), - ('ReferenceGridHRV', reference_grid), - ('PlannedCoverageVIS_IR', planned_coverage_vis_ir), - ('PlannedCoverageHRV', planned_coverage_hrv), - ('Level15ImageProduction', level_15_image_production)] + ("ProjectionDescription", projection_description), + ("ReferenceGridVIS_IR", reference_grid), + ("ReferenceGridHRV", reference_grid), + ("PlannedCoverageVIS_IR", planned_coverage_vis_ir), + ("PlannedCoverageHRV", planned_coverage_hrv), + ("Level15ImageProduction", level_15_image_production)] return record @@ -450,122 +450,122 @@ def image_description(self): def radiometric_processing(self): """Get radiometric processing data.""" rp_summary = [ - ('RadianceLinearization', (bool, 12)), - ('DetectorEqualization', (bool, 12)), - ('OnboardCalibrationResult', (bool, 12)), - ('MPEFCalFeedback', (bool, 12)), - ('MTFAdaptation', (bool, 12)), - ('StrayLightCorrection', (bool, 12))] + ("RadianceLinearization", (bool, 12)), + ("DetectorEqualization", (bool, 12)), + ("OnboardCalibrationResult", (bool, 12)), + ("MPEFCalFeedback", (bool, 12)), + ("MTFAdaptation", (bool, 12)), + ("StrayLightCorrection", (bool, 12))] level_15_image_calibration = [ - ('CalSlope', np.float64), - ('CalOffset', np.float64)] + ("CalSlope", np.float64), + ("CalOffset", np.float64)] time_cuc_size = [ - ('CT1', np.uint8), - ('CT2', np.uint8), - ('CT3', np.uint8), - ('CT4', np.uint8), - ('FT1', np.uint8), - ('FT2', np.uint8), - ('FT3', np.uint8)] + ("CT1", np.uint8), + ("CT2", np.uint8), + ("CT3", np.uint8), + ("CT4", np.uint8), + ("FT1", np.uint8), + ("FT2", np.uint8), + ("FT3", np.uint8)] cold_fp_temperature = [ - ('FCUNominalColdFocalPlaneTemp', np.uint16), - ('FCURedundantColdFocalPlaneTemp', np.uint16)] + ("FCUNominalColdFocalPlaneTemp", np.uint16), + ("FCURedundantColdFocalPlaneTemp", np.uint16)] warm_fp_temperature = [ - ('FCUNominalWarmFocalPlaneVHROTemp', np.uint16), - ('FCURedundantWarmFocalPlaneVHROTemp', np.uint16)] + ("FCUNominalWarmFocalPlaneVHROTemp", np.uint16), + ("FCURedundantWarmFocalPlaneVHROTemp", np.uint16)] scan_mirror_temperature = [ - ('FCUNominalScanMirrorSensor1Temp', np.uint16), - 
('FCURedundantScanMirrorSensor1Temp', np.uint16), - ('FCUNominalScanMirrorSensor2Temp', np.uint16), - ('FCURedundantScanMirrorSensor2Temp', np.uint16)] + ("FCUNominalScanMirrorSensor1Temp", np.uint16), + ("FCURedundantScanMirrorSensor1Temp", np.uint16), + ("FCUNominalScanMirrorSensor2Temp", np.uint16), + ("FCURedundantScanMirrorSensor2Temp", np.uint16)] m1m2m3_temperature = [ - ('FCUNominalM1MirrorSensor1Temp', np.uint16), - ('FCURedundantM1MirrorSensor1Temp', np.uint16), - ('FCUNominalM1MirrorSensor2Temp', np.uint16), - ('FCURedundantM1MirrorSensor2Temp', np.uint16), - ('FCUNominalM23AssemblySensor1Temp', np.uint8), - ('FCURedundantM23AssemblySensor1Temp', np.uint8), - ('FCUNominalM23AssemblySensor2Temp', np.uint8), - ('FCURedundantM23AssemblySensor2Temp', np.uint8)] + ("FCUNominalM1MirrorSensor1Temp", np.uint16), + ("FCURedundantM1MirrorSensor1Temp", np.uint16), + ("FCUNominalM1MirrorSensor2Temp", np.uint16), + ("FCURedundantM1MirrorSensor2Temp", np.uint16), + ("FCUNominalM23AssemblySensor1Temp", np.uint8), + ("FCURedundantM23AssemblySensor1Temp", np.uint8), + ("FCUNominalM23AssemblySensor2Temp", np.uint8), + ("FCURedundantM23AssemblySensor2Temp", np.uint8)] baffle_temperature = [ - ('FCUNominalM1BaffleTemp', np.uint16), - ('FCURedundantM1BaffleTemp', np.uint16)] + ("FCUNominalM1BaffleTemp", np.uint16), + ("FCURedundantM1BaffleTemp", np.uint16)] blackbody_temperature = [ - ('FCUNominalBlackBodySensorTemp', np.uint16), - ('FCURedundantBlackBodySensorTemp', np.uint16)] + ("FCUNominalBlackBodySensorTemp", np.uint16), + ("FCURedundantBlackBodySensorTemp", np.uint16)] fcu_mode = [ - ('FCUNominalSMMStatus', 'S2'), - ('FCURedundantSMMStatus', 'S2')] + ("FCUNominalSMMStatus", "S2"), + ("FCURedundantSMMStatus", "S2")] extracted_bb_data = [ - ('NumberOfPixelsUsed', np.uint32), - ('MeanCount', np.float32), - ('RMS', np.float32), - ('MaxCount', np.uint16), - ('MinCount', np.uint16), - ('BB_Processing_Slope', np.float64), - ('BB_Processing_Offset', np.float64)] + ("NumberOfPixelsUsed", np.uint32), + ("MeanCount", np.float32), + ("RMS", np.float32), + ("MaxCount", np.uint16), + ("MinCount", np.uint16), + ("BB_Processing_Slope", np.float64), + ("BB_Processing_Offset", np.float64)] bb_related_data = [ - ('OnBoardBBTime', time_cuc_size), - ('MDUOutGain', (np.uint16, 42)), - ('MDUCoarseGain', (np.uint8, 42)), - ('MDUFineGain', (np.uint16, 42)), - ('MDUNumericalOffset', (np.uint16, 42)), - ('PUGain', (np.uint16, 42)), - ('PUOffset', (np.uint16, 27)), - ('PUBias', (np.uint16, 15)), - ('DCRValues', (np.uint8, 63)), - ('X_DeepSpaceWindowPosition', np.int8), - ('ColdFPTemperature', cold_fp_temperature), - ('WarmFPTemperature', warm_fp_temperature), - ('ScanMirrorTemperature', scan_mirror_temperature), - ('M1M2M3Temperature', m1m2m3_temperature), - ('BaffleTemperature', baffle_temperature), - ('BlackBodyTemperature', blackbody_temperature), - ('FCUMode', fcu_mode), - ('ExtractedBBData', (extracted_bb_data, 12))] + ("OnBoardBBTime", time_cuc_size), + ("MDUOutGain", (np.uint16, 42)), + ("MDUCoarseGain", (np.uint8, 42)), + ("MDUFineGain", (np.uint16, 42)), + ("MDUNumericalOffset", (np.uint16, 42)), + ("PUGain", (np.uint16, 42)), + ("PUOffset", (np.uint16, 27)), + ("PUBias", (np.uint16, 15)), + ("DCRValues", (np.uint8, 63)), + ("X_DeepSpaceWindowPosition", np.int8), + ("ColdFPTemperature", cold_fp_temperature), + ("WarmFPTemperature", warm_fp_temperature), + ("ScanMirrorTemperature", scan_mirror_temperature), + ("M1M2M3Temperature", m1m2m3_temperature), + ("BaffleTemperature", baffle_temperature), + 
("BlackBodyTemperature", blackbody_temperature), + ("FCUMode", fcu_mode), + ("ExtractedBBData", (extracted_bb_data, 12))] black_body_data_used = [ - ('BBObservationUTC', time_cds_expanded), - ('BBRelatedData', bb_related_data)] + ("BBObservationUTC", time_cds_expanded), + ("BBRelatedData", bb_related_data)] impf_cal_data = [ - ('ImageQualityFlag', np.uint8), - ('ReferenceDataFlag', np.uint8), - ('AbsCalMethod', np.uint8), - ('Pad1', 'S1'), - ('AbsCalWeightVic', np.float32), - ('AbsCalWeightXsat', np.float32), - ('AbsCalCoeff', np.float32), - ('AbsCalError', np.float32), - ('GSICSCalCoeff', np.float32), - ('GSICSCalError', np.float32), - ('GSICSOffsetCount', np.float32)] + ("ImageQualityFlag", np.uint8), + ("ReferenceDataFlag", np.uint8), + ("AbsCalMethod", np.uint8), + ("Pad1", "S1"), + ("AbsCalWeightVic", np.float32), + ("AbsCalWeightXsat", np.float32), + ("AbsCalCoeff", np.float32), + ("AbsCalError", np.float32), + ("GSICSCalCoeff", np.float32), + ("GSICSCalError", np.float32), + ("GSICSOffsetCount", np.float32)] rad_proc_mtf_adaptation = [ - ('VIS_IRMTFCorrectionE_W', (np.float32, (33, 16))), - ('VIS_IRMTFCorrectionN_S', (np.float32, (33, 16))), - ('HRVMTFCorrectionE_W', (np.float32, (9, 16))), - ('HRVMTFCorrectionN_S', (np.float32, (9, 16))), - ('StraylightCorrection', (np.float32, (12, 8, 8)))] + ("VIS_IRMTFCorrectionE_W", (np.float32, (33, 16))), + ("VIS_IRMTFCorrectionN_S", (np.float32, (33, 16))), + ("HRVMTFCorrectionE_W", (np.float32, (9, 16))), + ("HRVMTFCorrectionN_S", (np.float32, (9, 16))), + ("StraylightCorrection", (np.float32, (12, 8, 8)))] record = [ - ('RPSummary', rp_summary), - ('Level15ImageCalibration', (level_15_image_calibration, 12)), - ('BlackBodyDataUsed', black_body_data_used), - ('MPEFCalFeedback', (impf_cal_data, 12)), - ('RadTransform', (np.float32, (42, 64))), - ('RadProcMTFAdaptation', rad_proc_mtf_adaptation)] + ("RPSummary", rp_summary), + ("Level15ImageCalibration", (level_15_image_calibration, 12)), + ("BlackBodyDataUsed", black_body_data_used), + ("MPEFCalFeedback", (impf_cal_data, 12)), + ("RadTransform", (np.float32, (42, 64))), + ("RadProcMTFAdaptation", rad_proc_mtf_adaptation)] return record @@ -573,20 +573,20 @@ def radiometric_processing(self): def geometric_processing(self): """Get geometric processing data.""" opt_axis_distances = [ - ('E-WFocalPlane', (np.float32, 42)), - ('N_SFocalPlane', (np.float32, 42))] + ("E-WFocalPlane", (np.float32, 42)), + ("N_SFocalPlane", (np.float32, 42))] earth_model = [ - ('TypeOfEarthModel', np.uint8), - ('EquatorialRadius', np.float64), - ('NorthPolarRadius', np.float64), - ('SouthPolarRadius', np.float64)] + ("TypeOfEarthModel", np.uint8), + ("EquatorialRadius", np.float64), + ("NorthPolarRadius", np.float64), + ("SouthPolarRadius", np.float64)] record = [ - ('OptAxisDistances', opt_axis_distances), - ('EarthModel', earth_model), - ('AtmosphericModel', (np.float32, (12, 360))), - ('ResamplingFunctions', (np.uint8, 12))] + ("OptAxisDistances", opt_axis_distances), + ("EarthModel", earth_model), + ("AtmosphericModel", (np.float32, (12, 360))), + ("ResamplingFunctions", (np.uint8, 12))] return record @@ -594,8 +594,8 @@ def geometric_processing(self): def impf_configuration(self): """Get impf configuration information.""" overall_configuration = [ - ('Issue', np.uint16), - ('Revision', np.uint16) + ("Issue", np.uint16), + ("Revision", np.uint16) ] sw_version = overall_configuration @@ -603,82 +603,82 @@ def impf_configuration(self): info_base_versions = sw_version su_configuration = [ - ('SWVersion', 
sw_version), - ('InfoBaseVersions', (info_base_versions, 10)) + ("SWVersion", sw_version), + ("InfoBaseVersions", (info_base_versions, 10)) ] su_details = [ - ('SUId', GSDTRecords.gp_su_id), - ('SUIdInstance', np.int8), - ('SUMode', np.uint8), - ('SUState', np.uint8), - ('SUConfiguration', su_configuration) + ("SUId", GSDTRecords.gp_su_id), + ("SUIdInstance", np.int8), + ("SUMode", np.uint8), + ("SUState", np.uint8), + ("SUConfiguration", su_configuration) ] equalisation_params = [ - ('ConstCoeff', np.float32), - ('LinearCoeff', np.float32), - ('QuadraticCoeff', np.float32) + ("ConstCoeff", np.float32), + ("LinearCoeff", np.float32), + ("QuadraticCoeff", np.float32) ] black_body_data_for_warm_start = [ - ('GTotalForMethod1', (np.float64, 12)), - ('GTotalForMethod2', (np.float64, 12)), - ('GTotalForMethod3', (np.float64, 12)), - ('GBackForMethod1', (np.float64, 12)), - ('GBackForMethod2', (np.float64, 12)), - ('GBackForMethod3', (np.float64, 12)), - ('RatioGTotalToGBack', (np.float64, 12)), - ('GainInFrontOpticsCont', (np.float64, 12)), - ('CalibrationConstants', (np.float32, 12)), - ('maxIncidentRadiance', (np.float64, 12)), - ('TimeOfColdObsSeconds', np.float64), - ('TimeOfColdObsNanoSecs', np.float64), - ('IncidenceRadiance', (np.float64, 12)), - ('TempCal', np.float64), - ('TempM1', np.float64), - ('TempScan', np.float64), - ('TempM1Baf', np.float64), - ('TempCalSurround', np.float64) + ("GTotalForMethod1", (np.float64, 12)), + ("GTotalForMethod2", (np.float64, 12)), + ("GTotalForMethod3", (np.float64, 12)), + ("GBackForMethod1", (np.float64, 12)), + ("GBackForMethod2", (np.float64, 12)), + ("GBackForMethod3", (np.float64, 12)), + ("RatioGTotalToGBack", (np.float64, 12)), + ("GainInFrontOpticsCont", (np.float64, 12)), + ("CalibrationConstants", (np.float32, 12)), + ("maxIncidentRadiance", (np.float64, 12)), + ("TimeOfColdObsSeconds", np.float64), + ("TimeOfColdObsNanoSecs", np.float64), + ("IncidenceRadiance", (np.float64, 12)), + ("TempCal", np.float64), + ("TempM1", np.float64), + ("TempScan", np.float64), + ("TempM1Baf", np.float64), + ("TempCalSurround", np.float64) ] mirror_parameters = [ - ('MaxFeedbackVoltage', np.float64), - ('MinFeedbackVoltage', np.float64), - ('MirrorSlipEstimate', np.float64) + ("MaxFeedbackVoltage", np.float64), + ("MinFeedbackVoltage", np.float64), + ("MirrorSlipEstimate", np.float64) ] hktm_parameters = [ - ('TimeS0Packet', time_cds_short), - ('TimeS1Packet', time_cds_short), - ('TimeS2Packet', time_cds_short), - ('TimeS3Packet', time_cds_short), - ('TimeS4Packet', time_cds_short), - ('TimeS5Packet', time_cds_short), - ('TimeS6Packet', time_cds_short), - ('TimeS7Packet', time_cds_short), - ('TimeS8Packet', time_cds_short), - ('TimeS9Packet', time_cds_short), - ('TimeSYPacket', time_cds_short), - ('TimePSPacket', time_cds_short) + ("TimeS0Packet", time_cds_short), + ("TimeS1Packet", time_cds_short), + ("TimeS2Packet", time_cds_short), + ("TimeS3Packet", time_cds_short), + ("TimeS4Packet", time_cds_short), + ("TimeS5Packet", time_cds_short), + ("TimeS6Packet", time_cds_short), + ("TimeS7Packet", time_cds_short), + ("TimeS8Packet", time_cds_short), + ("TimeS9Packet", time_cds_short), + ("TimeSYPacket", time_cds_short), + ("TimePSPacket", time_cds_short) ] warm_start_params = [ - ('ScanningLaw', (np.float64, 1527)), - ('RadFramesAlignment', (np.float64, 3)), - ('ScanningLawVariation', (np.float32, 2)), - ('EqualisationParams', (equalisation_params, 42)), - ('BlackBodyDataForWarmStart', black_body_data_for_warm_start), - ('MirrorParameters', 
mirror_parameters), - ('LastSpinPeriod', np.float64), - ('HKTMParameters', hktm_parameters), - ('WSPReserved', (np.uint8, 3312)) + ("ScanningLaw", (np.float64, 1527)), + ("RadFramesAlignment", (np.float64, 3)), + ("ScanningLawVariation", (np.float32, 2)), + ("EqualisationParams", (equalisation_params, 42)), + ("BlackBodyDataForWarmStart", black_body_data_for_warm_start), + ("MirrorParameters", mirror_parameters), + ("LastSpinPeriod", np.float64), + ("HKTMParameters", hktm_parameters), + ("WSPReserved", (np.uint8, 3312)) ] record = [ - ('OverallConfiguration', overall_configuration), - ('SUDetails', (su_details, 50)), - ('WarmStartParams', warm_start_params) + ("OverallConfiguration", overall_configuration), + ("SUDetails", (su_details, 50)), + ("WarmStartParams", warm_start_params) ] return record @@ -695,23 +695,23 @@ def get(self): """Get header record data.""" # 380363 bytes record = [ - ('GP_PK_HEADER', GSDTRecords.gp_pk_header), - ('GP_PK_SH1', GSDTRecords.gp_pk_sh1), - ('15TRAILER', self.seviri_l15_trailer) + ("GP_PK_HEADER", GSDTRecords.gp_pk_header), + ("GP_PK_SH1", GSDTRecords.gp_pk_sh1), + ("15TRAILER", self.seviri_l15_trailer) ] - return np.dtype(record).newbyteorder('>') + return np.dtype(record).newbyteorder(">") @property def seviri_l15_trailer(self): """Get file trailer data.""" record = [ - ('15TrailerVersion', np.uint8), - ('ImageProductionStats', self.image_production_stats), - ('NavigationExtractionResults', self.navigation_extraction_results), - ('RadiometricQuality', self.radiometric_quality), - ('GeometricQuality', self.geometric_quality), - ('TimelinessAndCompleteness', self.timeliness_and_completeness) + ("15TrailerVersion", np.uint8), + ("ImageProductionStats", self.image_production_stats), + ("NavigationExtractionResults", self.navigation_extraction_results), + ("RadiometricQuality", self.radiometric_quality), + ("GeometricQuality", self.geometric_quality), + ("TimelinessAndCompleteness", self.timeliness_and_completeness) ] return record @@ -721,69 +721,69 @@ def image_production_stats(self): gp_sc_id = GSDTRecords.gp_sc_id actual_scanning_summary = [ - ('NominalImageScanning', np.uint8), - ('ReducedScan', np.uint8), - ('ForwardScanStart', time_cds_short), - ('ForwardScanEnd', time_cds_short) + ("NominalImageScanning", np.uint8), + ("ReducedScan", np.uint8), + ("ForwardScanStart", time_cds_short), + ("ForwardScanEnd", time_cds_short) ] radiometric_behaviour = [ - ('NominalBehaviour', np.uint8), - ('RadScanIrregularity', np.uint8), - ('RadStoppage', np.uint8), - ('RepeatCycleNotCompleted', np.uint8), - ('GainChangeTookPlace', np.uint8), - ('DecontaminationTookPlace', np.uint8), - ('NoBBCalibrationAchieved', np.uint8), - ('IncorrectTemperature', np.uint8), - ('InvalidBBData', np.uint8), - ('InvalidAuxOrHKTMData', np.uint8), - ('RefocusingMechanismActuated', np.uint8), - ('MirrorBackToReferencePos', np.uint8) + ("NominalBehaviour", np.uint8), + ("RadScanIrregularity", np.uint8), + ("RadStoppage", np.uint8), + ("RepeatCycleNotCompleted", np.uint8), + ("GainChangeTookPlace", np.uint8), + ("DecontaminationTookPlace", np.uint8), + ("NoBBCalibrationAchieved", np.uint8), + ("IncorrectTemperature", np.uint8), + ("InvalidBBData", np.uint8), + ("InvalidAuxOrHKTMData", np.uint8), + ("RefocusingMechanismActuated", np.uint8), + ("MirrorBackToReferencePos", np.uint8) ] reception_summary_stats = [ - ('PlannedNumberOfL10Lines', (np.uint32, 12)), - ('NumberOfMissingL10Lines', (np.uint32, 12)), - ('NumberOfCorruptedL10Lines', (np.uint32, 12)), - ('NumberOfReplacedL10Lines', 
(np.uint32, 12)) + ("PlannedNumberOfL10Lines", (np.uint32, 12)), + ("NumberOfMissingL10Lines", (np.uint32, 12)), + ("NumberOfCorruptedL10Lines", (np.uint32, 12)), + ("NumberOfReplacedL10Lines", (np.uint32, 12)) ] l15_image_validity = [ - ('NominalImage', np.uint8), - ('NonNominalBecauseIncomplete', np.uint8), - ('NonNominalRadiometricQuality', np.uint8), - ('NonNominalGeometricQuality', np.uint8), - ('NonNominalTimeliness', np.uint8), - ('IncompleteL15', np.uint8), + ("NominalImage", np.uint8), + ("NonNominalBecauseIncomplete", np.uint8), + ("NonNominalRadiometricQuality", np.uint8), + ("NonNominalGeometricQuality", np.uint8), + ("NonNominalTimeliness", np.uint8), + ("IncompleteL15", np.uint8), ] actual_l15_coverage_vis_ir = [ - ('SouthernLineActual', np.int32), - ('NorthernLineActual', np.int32), - ('EasternColumnActual', np.int32), - ('WesternColumnActual', np.int32) + ("SouthernLineActual", np.int32), + ("NorthernLineActual", np.int32), + ("EasternColumnActual", np.int32), + ("WesternColumnActual", np.int32) ] actual_l15_coverage_hrv = [ - ('LowerSouthLineActual', np.int32), - ('LowerNorthLineActual', np.int32), - ('LowerEastColumnActual', np.int32), - ('LowerWestColumnActual', np.int32), - ('UpperSouthLineActual', np.int32), - ('UpperNorthLineActual', np.int32), - ('UpperEastColumnActual', np.int32), - ('UpperWestColumnActual', np.int32), + ("LowerSouthLineActual", np.int32), + ("LowerNorthLineActual", np.int32), + ("LowerEastColumnActual", np.int32), + ("LowerWestColumnActual", np.int32), + ("UpperSouthLineActual", np.int32), + ("UpperNorthLineActual", np.int32), + ("UpperEastColumnActual", np.int32), + ("UpperWestColumnActual", np.int32), ] record = [ - ('SatelliteId', gp_sc_id), - ('ActualScanningSummary', actual_scanning_summary), - ('RadiometricBehaviour', radiometric_behaviour), - ('ReceptionSummaryStats', reception_summary_stats), - ('L15ImageValidity', (l15_image_validity, 12)), - ('ActualL15CoverageVIS_IR', actual_l15_coverage_vis_ir), - ('ActualL15CoverageHRV', actual_l15_coverage_hrv) + ("SatelliteId", gp_sc_id), + ("ActualScanningSummary", actual_scanning_summary), + ("RadiometricBehaviour", radiometric_behaviour), + ("ReceptionSummaryStats", reception_summary_stats), + ("L15ImageValidity", (l15_image_validity, 12)), + ("ActualL15CoverageVIS_IR", actual_l15_coverage_vis_ir), + ("ActualL15CoverageHRV", actual_l15_coverage_hrv) ] return record @@ -792,47 +792,47 @@ def image_production_stats(self): def navigation_extraction_results(self): """Get navigation extraction data.""" horizon_observation = [ - ('HorizonId', np.uint8), - ('Alpha', np.float64), - ('AlphaConfidence', np.float64), - ('Beta', np.float64), - ('BetaConfidence', np.float64), - ('ObservationTime', time_cds), - ('SpinRate', np.float64), - ('AlphaDeviation', np.float64), - ('BetaDeviation', np.float64) + ("HorizonId", np.uint8), + ("Alpha", np.float64), + ("AlphaConfidence", np.float64), + ("Beta", np.float64), + ("BetaConfidence", np.float64), + ("ObservationTime", time_cds), + ("SpinRate", np.float64), + ("AlphaDeviation", np.float64), + ("BetaDeviation", np.float64) ] star_observation = [ - ('StarId', np.uint16), - ('Alpha', np.float64), - ('AlphaConfidence', np.float64), - ('Beta', np.float64), - ('BetaConfidence', np.float64), - ('ObservationTime', time_cds), - ('SpinRate', np.float64), - ('AlphaDeviation', np.float64), - ('BetaDeviation', np.float64) + ("StarId", np.uint16), + ("Alpha", np.float64), + ("AlphaConfidence", np.float64), + ("Beta", np.float64), + ("BetaConfidence", np.float64), + 
("ObservationTime", time_cds), + ("SpinRate", np.float64), + ("AlphaDeviation", np.float64), + ("BetaDeviation", np.float64) ] landmark_observation = [ - ('LandmarkId', np.uint16), - ('LandmarkLongitude', np.float64), - ('LandmarkLatitude', np.float64), - ('Alpha', np.float64), - ('AlphaConfidence', np.float64), - ('Beta', np.float64), - ('BetaConfidence', np.float64), - ('ObservationTime', time_cds), - ('SpinRate', np.float64), - ('AlphaDeviation', np.float64), - ('BetaDeviation', np.float64) + ("LandmarkId", np.uint16), + ("LandmarkLongitude", np.float64), + ("LandmarkLatitude", np.float64), + ("Alpha", np.float64), + ("AlphaConfidence", np.float64), + ("Beta", np.float64), + ("BetaConfidence", np.float64), + ("ObservationTime", time_cds), + ("SpinRate", np.float64), + ("AlphaDeviation", np.float64), + ("BetaDeviation", np.float64) ] record = [ - ('ExtractedHorizons', (horizon_observation, 4)), - ('ExtractedStars', (star_observation, 20)), - ('ExtractedLandmarks', (landmark_observation, 50)) + ("ExtractedHorizons", (horizon_observation, 4)), + ("ExtractedStars", (star_observation, 20)), + ("ExtractedLandmarks", (landmark_observation, 50)) ] return record @@ -841,85 +841,85 @@ def navigation_extraction_results(self): def radiometric_quality(self): """Get radiometric quality record data.""" l10_rad_quality = [ - ('FullImageMinimumCount', np.uint16), - ('FullImageMaximumCount', np.uint16), - ('EarthDiskMinimumCount', np.uint16), - ('EarthDiskMaximumCount', np.uint16), - ('MoonMinimumCount', np.uint16), - ('MoonMaximumCount', np.uint16), - ('FullImageMeanCount', np.float32), - ('FullImageStandardDeviation', np.float32), - ('EarthDiskMeanCount', np.float32), - ('EarthDiskStandardDeviation', np.float32), - ('MoonMeanCount', np.float32), - ('MoonStandardDeviation', np.float32), - ('SpaceMeanCount', np.float32), - ('SpaceStandardDeviation', np.float32), - ('SESpaceCornerMeanCount', np.float32), - ('SESpaceCornerStandardDeviation', np.float32), - ('SWSpaceCornerMeanCount', np.float32), - ('SWSpaceCornerStandardDeviation', np.float32), - ('NESpaceCornerMeanCount', np.float32), - ('NESpaceCornerStandardDeviation', np.float32), - ('NWSpaceCornerMeanCount', np.float32), - ('NWSpaceCornerStandardDeviation', np.float32), - ('4SpaceCornersMeanCount', np.float32), - ('4SpaceCornersStandardDeviation', np.float32), - ('FullImageHistogram', (np.uint32, 256)), - ('EarthDiskHistogram', (np.uint32, 256)), - ('ImageCentreSquareHistogram', (np.uint32, 256)), - ('SESpaceCornerHistogram', (np.uint32, 128)), - ('SWSpaceCornerHistogram', (np.uint32, 128)), - ('NESpaceCornerHistogram', (np.uint32, 128)), - ('NWSpaceCornerHistogram', (np.uint32, 128)), - ('FullImageEntropy', (np.float32, 3)), - ('EarthDiskEntropy', (np.float32, 3)), - ('ImageCentreSquareEntropy', (np.float32, 3)), - ('SESpaceCornerEntropy', (np.float32, 3)), - ('SWSpaceCornerEntropy', (np.float32, 3)), - ('NESpaceCornerEntropy', (np.float32, 3)), - ('NWSpaceCornerEntropy', (np.float32, 3)), - ('4SpaceCornersEntropy', (np.float32, 3)), - ('ImageCentreSquarePSD_EW', (np.float32, 128)), - ('FullImagePSD_EW', (np.float32, 128)), - ('ImageCentreSquarePSD_NS', (np.float32, 128)), - ('FullImagePSD_NS', (np.float32, 128)) + ("FullImageMinimumCount", np.uint16), + ("FullImageMaximumCount", np.uint16), + ("EarthDiskMinimumCount", np.uint16), + ("EarthDiskMaximumCount", np.uint16), + ("MoonMinimumCount", np.uint16), + ("MoonMaximumCount", np.uint16), + ("FullImageMeanCount", np.float32), + ("FullImageStandardDeviation", np.float32), + ("EarthDiskMeanCount", 
np.float32), + ("EarthDiskStandardDeviation", np.float32), + ("MoonMeanCount", np.float32), + ("MoonStandardDeviation", np.float32), + ("SpaceMeanCount", np.float32), + ("SpaceStandardDeviation", np.float32), + ("SESpaceCornerMeanCount", np.float32), + ("SESpaceCornerStandardDeviation", np.float32), + ("SWSpaceCornerMeanCount", np.float32), + ("SWSpaceCornerStandardDeviation", np.float32), + ("NESpaceCornerMeanCount", np.float32), + ("NESpaceCornerStandardDeviation", np.float32), + ("NWSpaceCornerMeanCount", np.float32), + ("NWSpaceCornerStandardDeviation", np.float32), + ("4SpaceCornersMeanCount", np.float32), + ("4SpaceCornersStandardDeviation", np.float32), + ("FullImageHistogram", (np.uint32, 256)), + ("EarthDiskHistogram", (np.uint32, 256)), + ("ImageCentreSquareHistogram", (np.uint32, 256)), + ("SESpaceCornerHistogram", (np.uint32, 128)), + ("SWSpaceCornerHistogram", (np.uint32, 128)), + ("NESpaceCornerHistogram", (np.uint32, 128)), + ("NWSpaceCornerHistogram", (np.uint32, 128)), + ("FullImageEntropy", (np.float32, 3)), + ("EarthDiskEntropy", (np.float32, 3)), + ("ImageCentreSquareEntropy", (np.float32, 3)), + ("SESpaceCornerEntropy", (np.float32, 3)), + ("SWSpaceCornerEntropy", (np.float32, 3)), + ("NESpaceCornerEntropy", (np.float32, 3)), + ("NWSpaceCornerEntropy", (np.float32, 3)), + ("4SpaceCornersEntropy", (np.float32, 3)), + ("ImageCentreSquarePSD_EW", (np.float32, 128)), + ("FullImagePSD_EW", (np.float32, 128)), + ("ImageCentreSquarePSD_NS", (np.float32, 128)), + ("FullImagePSD_NS", (np.float32, 128)) ] l15_rad_quality = [ - ('FullImageMinimumCount', np.uint16), - ('FullImageMaximumCount', np.uint16), - ('EarthDiskMinimumCount', np.uint16), - ('EarthDiskMaximumCount', np.uint16), - ('FullImageMeanCount', np.float32), - ('FullImageStandardDeviation', np.float32), - ('EarthDiskMeanCount', np.float32), - ('EarthDiskStandardDeviation', np.float32), - ('SpaceMeanCount', np.float32), - ('SpaceStandardDeviation', np.float32), - ('FullImageHistogram', (np.uint32, 256)), - ('EarthDiskHistogram', (np.uint32, 256)), - ('ImageCentreSquareHistogram', (np.uint32, 256)), - ('FullImageEntropy', (np.float32, 3)), - ('EarthDiskEntropy', (np.float32, 3)), - ('ImageCentreSquareEntropy', (np.float32, 3)), - ('ImageCentreSquarePSD_EW', (np.float32, 128)), - ('FullImagePSD_EW', (np.float32, 128)), - ('ImageCentreSquarePSD_NS', (np.float32, 128)), - ('FullImagePSD_NS', (np.float32, 128)), - ('SESpaceCornerL15_RMS', np.float32), - ('SESpaceCornerL15_Mean', np.float32), - ('SWSpaceCornerL15_RMS', np.float32), - ('SWSpaceCornerL15_Mean', np.float32), - ('NESpaceCornerL15_RMS', np.float32), - ('NESpaceCornerL15_Mean', np.float32), - ('NWSpaceCornerL15_RMS', np.float32), - ('NWSpaceCornerL15_Mean', np.float32) + ("FullImageMinimumCount", np.uint16), + ("FullImageMaximumCount", np.uint16), + ("EarthDiskMinimumCount", np.uint16), + ("EarthDiskMaximumCount", np.uint16), + ("FullImageMeanCount", np.float32), + ("FullImageStandardDeviation", np.float32), + ("EarthDiskMeanCount", np.float32), + ("EarthDiskStandardDeviation", np.float32), + ("SpaceMeanCount", np.float32), + ("SpaceStandardDeviation", np.float32), + ("FullImageHistogram", (np.uint32, 256)), + ("EarthDiskHistogram", (np.uint32, 256)), + ("ImageCentreSquareHistogram", (np.uint32, 256)), + ("FullImageEntropy", (np.float32, 3)), + ("EarthDiskEntropy", (np.float32, 3)), + ("ImageCentreSquareEntropy", (np.float32, 3)), + ("ImageCentreSquarePSD_EW", (np.float32, 128)), + ("FullImagePSD_EW", (np.float32, 128)), + ("ImageCentreSquarePSD_NS", (np.float32, 
128)), + ("FullImagePSD_NS", (np.float32, 128)), + ("SESpaceCornerL15_RMS", np.float32), + ("SESpaceCornerL15_Mean", np.float32), + ("SWSpaceCornerL15_RMS", np.float32), + ("SWSpaceCornerL15_Mean", np.float32), + ("NESpaceCornerL15_RMS", np.float32), + ("NESpaceCornerL15_Mean", np.float32), + ("NWSpaceCornerL15_RMS", np.float32), + ("NWSpaceCornerL15_Mean", np.float32) ] record = [ - ('L10RadQuality', (l10_rad_quality, 42)), - ('L15RadQuality', (l15_rad_quality, 12)) + ("L10RadQuality", (l10_rad_quality, 42)), + ("L15RadQuality", (l15_rad_quality, 12)) ] return record @@ -928,19 +928,19 @@ def radiometric_quality(self): def geometric_quality(self): """Get geometric quality record data.""" absolute_accuracy = [ - ('QualityInfoValidity', np.uint8), - ('EastWestAccuracyRMS', np.float32), - ('NorthSouthAccuracyRMS', np.float32), - ('MagnitudeRMS', np.float32), - ('EastWestUncertaintyRMS', np.float32), - ('NorthSouthUncertaintyRMS', np.float32), - ('MagnitudeUncertaintyRMS', np.float32), - ('EastWestMaxDeviation', np.float32), - ('NorthSouthMaxDeviation', np.float32), - ('MagnitudeMaxDeviation', np.float32), - ('EastWestUncertaintyMax', np.float32), - ('NorthSouthUncertaintyMax', np.float32), - ('MagnitudeUncertaintyMax', np.float32) + ("QualityInfoValidity", np.uint8), + ("EastWestAccuracyRMS", np.float32), + ("NorthSouthAccuracyRMS", np.float32), + ("MagnitudeRMS", np.float32), + ("EastWestUncertaintyRMS", np.float32), + ("NorthSouthUncertaintyRMS", np.float32), + ("MagnitudeUncertaintyRMS", np.float32), + ("EastWestMaxDeviation", np.float32), + ("NorthSouthMaxDeviation", np.float32), + ("MagnitudeMaxDeviation", np.float32), + ("EastWestUncertaintyMax", np.float32), + ("NorthSouthUncertaintyMax", np.float32), + ("MagnitudeUncertaintyMax", np.float32) ] relative_accuracy = absolute_accuracy @@ -948,35 +948,35 @@ def geometric_quality(self): pixels_16_relative_accuracy = absolute_accuracy misregistration_residuals = [ - ('QualityInfoValidity', np.uint8), - ('EastWestResidual', np.float32), - ('NorthSouthResidual', np.float32), - ('EastWestUncertainty', np.float32), - ('NorthSouthUncertainty', np.float32), - ('EastWestRMS', np.float32), - ('NorthSouthRMS', np.float32), - ('EastWestMagnitude', np.float32), - ('NorthSouthMagnitude', np.float32), - ('EastWestMagnitudeUncertainty', np.float32), - ('NorthSouthMagnitudeUncertainty', np.float32) + ("QualityInfoValidity", np.uint8), + ("EastWestResidual", np.float32), + ("NorthSouthResidual", np.float32), + ("EastWestUncertainty", np.float32), + ("NorthSouthUncertainty", np.float32), + ("EastWestRMS", np.float32), + ("NorthSouthRMS", np.float32), + ("EastWestMagnitude", np.float32), + ("NorthSouthMagnitude", np.float32), + ("EastWestMagnitudeUncertainty", np.float32), + ("NorthSouthMagnitudeUncertainty", np.float32) ] geometric_quality_status = [ - ('QualityNominal', np.uint8), - ('NominalAbsolute', np.uint8), - ('NominalRelativeToPreviousImage', np.uint8), - ('NominalForREL500', np.uint8), - ('NominalForREL16', np.uint8), - ('NominalForResMisreg', np.uint8) + ("QualityNominal", np.uint8), + ("NominalAbsolute", np.uint8), + ("NominalRelativeToPreviousImage", np.uint8), + ("NominalForREL500", np.uint8), + ("NominalForREL16", np.uint8), + ("NominalForResMisreg", np.uint8) ] record = [ - ('AbsoluteAccuracy', (absolute_accuracy, 12)), - ('RelativeAccuracy', (relative_accuracy, 12)), - ('500PixelsRelativeAccuracy', (pixels_500_relative_accuracy, 12)), - ('16PixelsRelativeAccuracy', (pixels_16_relative_accuracy, 12)), - ('MisregistrationResiduals', 
(misregistration_residuals, 12)), - ('GeometricQualityStatus', (geometric_quality_status, 12)) + ("AbsoluteAccuracy", (absolute_accuracy, 12)), + ("RelativeAccuracy", (relative_accuracy, 12)), + ("500PixelsRelativeAccuracy", (pixels_500_relative_accuracy, 12)), + ("16PixelsRelativeAccuracy", (pixels_16_relative_accuracy, 12)), + ("MisregistrationResiduals", (misregistration_residuals, 12)), + ("GeometricQualityStatus", (geometric_quality_status, 12)) ] return record @@ -985,22 +985,22 @@ def geometric_quality(self): def timeliness_and_completeness(self): """Get time and completeness record data.""" timeliness = [ - ('MaxDelay', np.float32), - ('MinDelay', np.float32), - ('MeanDelay', np.float32) + ("MaxDelay", np.float32), + ("MinDelay", np.float32), + ("MeanDelay", np.float32) ] completeness = [ - ('PlannedL15ImageLines', np.uint16), - ('GeneratedL15ImageLines', np.uint16), - ('ValidL15ImageLines', np.uint16), - ('DummyL15ImageLines', np.uint16), - ('CorruptedL15ImageLines', np.uint16) + ("PlannedL15ImageLines", np.uint16), + ("GeneratedL15ImageLines", np.uint16), + ("ValidL15ImageLines", np.uint16), + ("DummyL15ImageLines", np.uint16), + ("CorruptedL15ImageLines", np.uint16) ] record = [ - ('Timeliness', timeliness), - ('Completeness', (completeness, 12)) + ("Timeliness", timeliness), + ("Completeness", (completeness, 12)) ] return record @@ -1013,15 +1013,15 @@ def get(self): """Get record data array.""" # X bytes record = [ - ('SatelliteStatus', self.satellite_status), - ('ImageAcquisition', self.image_acquisition), - ('CelestialEvents', self.celestial_events), - ('ImageDescription', self.image_description), - ('RadiometricProcessing', self.radiometric_processing), - ('GeometricProcessing', self.geometric_processing) + ("SatelliteStatus", self.satellite_status), + ("ImageAcquisition", self.image_acquisition), + ("CelestialEvents", self.celestial_events), + ("ImageDescription", self.image_description), + ("RadiometricProcessing", self.radiometric_processing), + ("GeometricProcessing", self.geometric_processing) ] - return np.dtype(record).newbyteorder('>') + return np.dtype(record).newbyteorder(">") def get_native_header(with_archive_header=True): @@ -1035,22 +1035,22 @@ def get_native_header(with_archive_header=True): DEFAULT_15_SECONDARY_PRODUCT_HEADER = { - 'NorthLineSelectedRectangle': {'Value': VISIR_NUM_LINES}, - 'SouthLineSelectedRectangle': {'Value': 1}, - 'EastColumnSelectedRectangle': {'Value': 1}, - 'WestColumnSelectedRectangle': {'Value': VISIR_NUM_COLUMNS}, - 'NumberColumnsVISIR': {'Value': VISIR_NUM_COLUMNS}, - 'NumberLinesVISIR': {'Value': VISIR_NUM_LINES}, - 'NumberColumnsHRV': {'Value': HRV_NUM_COLUMNS}, - 'NumberLinesHRV': {'Value': HRV_NUM_LINES}, - 'SelectedBandIDs': {'Value': 'XXXXXXXXXXXX'} + "NorthLineSelectedRectangle": {"Value": VISIR_NUM_LINES}, + "SouthLineSelectedRectangle": {"Value": 1}, + "EastColumnSelectedRectangle": {"Value": 1}, + "WestColumnSelectedRectangle": {"Value": VISIR_NUM_COLUMNS}, + "NumberColumnsVISIR": {"Value": VISIR_NUM_COLUMNS}, + "NumberLinesVISIR": {"Value": VISIR_NUM_LINES}, + "NumberColumnsHRV": {"Value": HRV_NUM_COLUMNS}, + "NumberLinesHRV": {"Value": HRV_NUM_LINES}, + "SelectedBandIDs": {"Value": "XXXXXXXXXXXX"} } """Default secondary product header for files containing all channels.""" hrit_epilogue = np.dtype( - Msg15NativeTrailerRecord().seviri_l15_trailer).newbyteorder('>') + Msg15NativeTrailerRecord().seviri_l15_trailer).newbyteorder(">") hrit_prologue = HritPrologue().get() impf_configuration = np.dtype( - 
L15DataHeaderRecord().impf_configuration).newbyteorder('>') + L15DataHeaderRecord().impf_configuration).newbyteorder(">") native_trailer = Msg15NativeTrailerRecord().get() diff --git a/satpy/readers/seviri_l1b_nc.py b/satpy/readers/seviri_l1b_nc.py index ae56053114..82e3b15297 100644 --- a/satpy/readers/seviri_l1b_nc.py +++ b/satpy/readers/seviri_l1b_nc.py @@ -41,7 +41,7 @@ ) from satpy.utils import get_legacy_chunk_size -logger = logging.getLogger('nc_msg') +logger = logging.getLogger("nc_msg") CHUNK_SIZE = get_legacy_chunk_size() @@ -73,9 +73,9 @@ def __init__(self, filename, filename_info, filetype_info, @property def _repeat_cycle_duration(self): """Get repeat cycle duration from the metadata.""" - if self.nc.attrs['nominal_image_scanning'] == 'T': + if self.nc.attrs["nominal_image_scanning"] == "T": return 15 - elif self.nc.attrs['reduced_scanning'] == 'T': + elif self.nc.attrs["reduced_scanning"] == "T": return 5 @property @@ -114,55 +114,55 @@ def end_time(self): def nc(self): """Read the file.""" return open_dataset(self.filename, decode_cf=True, mask_and_scale=False, - chunks=CHUNK_SIZE).rename({'num_columns_vis_ir': 'x', - 'num_rows_vis_ir': 'y'}) + chunks=CHUNK_SIZE).rename({"num_columns_vis_ir": "x", + "num_rows_vis_ir": "y"}) def get_metadata(self): """Get metadata.""" # Obtain some area definition attributes - equatorial_radius = self.nc.attrs['equatorial_radius'] * 1000. - polar_radius = (self.nc.attrs['north_polar_radius'] * 1000 + self.nc.attrs['south_polar_radius'] * 1000) * 0.5 - ssp_lon = self.nc.attrs['longitude_of_SSP'] - self.mda['vis_ir_grid_origin'] = self.nc.attrs['vis_ir_grid_origin'] - self.mda['vis_ir_column_dir_grid_step'] = self.nc.attrs['vis_ir_column_dir_grid_step'] * 1000.0 - self.mda['vis_ir_line_dir_grid_step'] = self.nc.attrs['vis_ir_line_dir_grid_step'] * 1000.0 + equatorial_radius = self.nc.attrs["equatorial_radius"] * 1000. 
+ polar_radius = (self.nc.attrs["north_polar_radius"] * 1000 + self.nc.attrs["south_polar_radius"] * 1000) * 0.5 + ssp_lon = self.nc.attrs["longitude_of_SSP"] + self.mda["vis_ir_grid_origin"] = self.nc.attrs["vis_ir_grid_origin"] + self.mda["vis_ir_column_dir_grid_step"] = self.nc.attrs["vis_ir_column_dir_grid_step"] * 1000.0 + self.mda["vis_ir_line_dir_grid_step"] = self.nc.attrs["vis_ir_line_dir_grid_step"] * 1000.0 # if FSFile is used h5netcdf engine is used which outputs arrays instead of floats for attributes if isinstance(equatorial_radius, np.ndarray): equatorial_radius = equatorial_radius.item() polar_radius = polar_radius.item() ssp_lon = ssp_lon.item() - self.mda['vis_ir_column_dir_grid_step'] = self.mda['vis_ir_column_dir_grid_step'].item() - self.mda['vis_ir_line_dir_grid_step'] = self.mda['vis_ir_line_dir_grid_step'].item() + self.mda["vis_ir_column_dir_grid_step"] = self.mda["vis_ir_column_dir_grid_step"].item() + self.mda["vis_ir_line_dir_grid_step"] = self.mda["vis_ir_line_dir_grid_step"].item() - self.mda['projection_parameters'] = {'a': equatorial_radius, - 'b': polar_radius, - 'h': 35785831.00, - 'ssp_longitude': ssp_lon} + self.mda["projection_parameters"] = {"a": equatorial_radius, + "b": polar_radius, + "h": 35785831.00, + "ssp_longitude": ssp_lon} - self.mda['number_of_lines'] = int(self.nc.dims['y']) - self.mda['number_of_columns'] = int(self.nc.dims['x']) + self.mda["number_of_lines"] = int(self.nc.dims["y"]) + self.mda["number_of_columns"] = int(self.nc.dims["x"]) # only needed for HRV channel which is not implemented yet # self.mda['hrv_number_of_lines'] = int(self.nc.dims['num_rows_hrv']) # self.mda['hrv_number_of_columns'] = int(self.nc.dims['num_columns_hrv']) self.deltaSt = self.reference + datetime.timedelta( - days=int(self.nc.attrs['true_repeat_cycle_start_day']), - milliseconds=int(self.nc.attrs['true_repeat_cycle_start_mi_sec'])) + days=int(self.nc.attrs["true_repeat_cycle_start_day"]), + milliseconds=int(self.nc.attrs["true_repeat_cycle_start_mi_sec"])) self.deltaEnd = self.reference + datetime.timedelta( - days=int(self.nc.attrs['planned_repeat_cycle_end_day']), - milliseconds=int(self.nc.attrs['planned_repeat_cycle_end_mi_sec'])) + days=int(self.nc.attrs["planned_repeat_cycle_end_day"]), + milliseconds=int(self.nc.attrs["planned_repeat_cycle_end_mi_sec"])) - self.north = int(self.nc.attrs['north_most_line']) - self.east = int(self.nc.attrs['east_most_pixel']) - self.west = int(self.nc.attrs['west_most_pixel']) - self.south = int(self.nc.attrs['south_most_line']) - self.platform_id = int(self.nc.attrs['satellite_id']) + self.north = int(self.nc.attrs["north_most_line"]) + self.east = int(self.nc.attrs["east_most_pixel"]) + self.west = int(self.nc.attrs["west_most_pixel"]) + self.south = int(self.nc.attrs["south_most_line"]) + self.platform_id = int(self.nc.attrs["satellite_id"]) def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" - dataset = self.nc[dataset_info['nc_key']] + dataset = self.nc[dataset_info["nc_key"]] # Correct for the scan line order # TODO: Move _add_scanline_acq_time() call to the end of the method @@ -171,7 +171,7 @@ def get_dataset(self, dataset_id, dataset_info): dataset = dataset.sel(y=slice(None, None, -1)) dataset = self.calibrate(dataset, dataset_id) - is_calibration = dataset_id['calibration'] in ['radiance', 'reflectance', 'brightness_temperature'] + is_calibration = dataset_id["calibration"] in ["radiance", "reflectance", "brightness_temperature"] if (is_calibration and self.mask_bad_quality_scan_lines): 
# noqa: E129 dataset = self._mask_bad_quality(dataset, dataset_info) @@ -180,17 +180,17 @@ def get_dataset(self, dataset_id, dataset_info): def calibrate(self, dataset, dataset_id): """Calibrate the data.""" - channel = dataset_id['name'] - calibration = dataset_id['calibration'] + channel = dataset_id["name"] + calibration = dataset_id["calibration"] - if dataset_id['calibration'] == 'counts': - dataset.attrs['_FillValue'] = 0 + if dataset_id["calibration"] == "counts": + dataset.attrs["_FillValue"] = 0 calib = SEVIRICalibrationHandler( platform_id=int(self.platform_id), channel_name=channel, coefs=self._get_calib_coefs(dataset, channel), - calib_mode='NOMINAL', + calib_mode="NOMINAL", scan_time=self.observation_start_time ) @@ -199,59 +199,59 @@ def calibrate(self, dataset, dataset_id): def _get_calib_coefs(self, dataset, channel): """Get coefficients for calibration from counts to radiance.""" band_idx = list(CHANNEL_NAMES.values()).index(channel) - offset = dataset.attrs['add_offset'].astype('float32') - gain = dataset.attrs['scale_factor'].astype('float32') + offset = dataset.attrs["add_offset"].astype("float32") + gain = dataset.attrs["scale_factor"].astype("float32") # Only one calibration available here return { - 'coefs': { - 'NOMINAL': { - 'gain': gain, - 'offset': offset + "coefs": { + "NOMINAL": { + "gain": gain, + "offset": offset }, - 'EXTERNAL': self.ext_calib_coefs.get(channel, {}) + "EXTERNAL": self.ext_calib_coefs.get(channel, {}) }, - 'radiance_type': self.nc['planned_chan_processing'].values[band_idx] + "radiance_type": self.nc["planned_chan_processing"].values[band_idx] } def _mask_bad_quality(self, dataset, dataset_info): """Mask scanlines with bad quality.""" - ch_number = int(dataset_info['nc_key'][2:]) - line_validity = self.nc['channel_data_visir_data_line_validity'][:, ch_number - 1].data - line_geometric_quality = self.nc['channel_data_visir_data_line_geometric_quality'][:, ch_number - 1].data - line_radiometric_quality = self.nc['channel_data_visir_data_line_radiometric_quality'][:, ch_number - 1].data + ch_number = int(dataset_info["nc_key"][2:]) + line_validity = self.nc["channel_data_visir_data_line_validity"][:, ch_number - 1].data + line_geometric_quality = self.nc["channel_data_visir_data_line_geometric_quality"][:, ch_number - 1].data + line_radiometric_quality = self.nc["channel_data_visir_data_line_radiometric_quality"][:, ch_number - 1].data return mask_bad_quality(dataset, line_validity, line_geometric_quality, line_radiometric_quality) def _update_attrs(self, dataset, dataset_info): """Update dataset attributes.""" - dataset.attrs.update(self.nc[dataset_info['nc_key']].attrs) + dataset.attrs.update(self.nc[dataset_info["nc_key"]].attrs) dataset.attrs.update(dataset_info) - dataset.attrs['platform_name'] = "Meteosat-" + SATNUM[self.platform_id] - dataset.attrs['sensor'] = 'seviri' - dataset.attrs['orbital_parameters'] = { - 'projection_longitude': self.mda['projection_parameters']['ssp_longitude'], - 'projection_latitude': 0., - 'projection_altitude': self.mda['projection_parameters']['h'], - 'satellite_nominal_longitude': float( - self.nc.attrs['nominal_longitude'] + dataset.attrs["platform_name"] = "Meteosat-" + SATNUM[self.platform_id] + dataset.attrs["sensor"] = "seviri" + dataset.attrs["orbital_parameters"] = { + "projection_longitude": self.mda["projection_parameters"]["ssp_longitude"], + "projection_latitude": 0., + "projection_altitude": self.mda["projection_parameters"]["h"], + "satellite_nominal_longitude": float( + 
self.nc.attrs["nominal_longitude"] ), - 'satellite_nominal_latitude': 0.0, + "satellite_nominal_latitude": 0.0, } - dataset.attrs['time_parameters'] = { - 'nominal_start_time': self.nominal_start_time, - 'nominal_end_time': self.nominal_end_time, - 'observation_start_time': self.observation_start_time, - 'observation_end_time': self.observation_end_time, + dataset.attrs["time_parameters"] = { + "nominal_start_time": self.nominal_start_time, + "nominal_end_time": self.nominal_end_time, + "observation_start_time": self.observation_start_time, + "observation_end_time": self.observation_end_time, } try: actual_lon, actual_lat, actual_alt = self.satpos - dataset.attrs['orbital_parameters'].update({ - 'satellite_actual_longitude': actual_lon, - 'satellite_actual_latitude': actual_lat, - 'satellite_actual_altitude': actual_alt, + dataset.attrs["orbital_parameters"].update({ + "satellite_actual_longitude": actual_lon, + "satellite_actual_latitude": actual_lat, + "satellite_actual_altitude": actual_alt, }) except NoValidOrbitParams as err: logger.warning(err) - dataset.attrs['georef_offset_corrected'] = self._get_earth_model() == 2 + dataset.attrs["georef_offset_corrected"] = self._get_earth_model() == 2 # remove attributes from original file which don't apply anymore strip_attrs = ["comment", "long_name", "nc_key", "scale_factor", "add_offset", "valid_min", "valid_max"] @@ -277,30 +277,30 @@ def get_area_def(self, dataset_id): """ pdict = {} - pdict['a'] = self.mda['projection_parameters']['a'] - pdict['b'] = self.mda['projection_parameters']['b'] - pdict['h'] = self.mda['projection_parameters']['h'] - pdict['ssp_lon'] = self.mda['projection_parameters']['ssp_longitude'] - - area_naming_input_dict = {'platform_name': 'msg', - 'instrument_name': 'seviri', - 'resolution': int(dataset_id['resolution']) + pdict["a"] = self.mda["projection_parameters"]["a"] + pdict["b"] = self.mda["projection_parameters"]["b"] + pdict["h"] = self.mda["projection_parameters"]["h"] + pdict["ssp_lon"] = self.mda["projection_parameters"]["ssp_longitude"] + + area_naming_input_dict = {"platform_name": "msg", + "instrument_name": "seviri", + "resolution": int(dataset_id["resolution"]) } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('seviri', pdict['ssp_lon'])}) - - if dataset_id['name'] == 'HRV': - pdict['nlines'] = self.mda['hrv_number_of_lines'] - pdict['ncols'] = self.mda['hrv_number_of_columns'] - pdict['a_name'] = area_naming['area_id'] - pdict['a_desc'] = area_naming['description'] - pdict['p_id'] = "" + **get_service_mode("seviri", pdict["ssp_lon"])}) + + if dataset_id["name"] == "HRV": + pdict["nlines"] = self.mda["hrv_number_of_lines"] + pdict["ncols"] = self.mda["hrv_number_of_columns"] + pdict["a_name"] = area_naming["area_id"] + pdict["a_desc"] = area_naming["description"] + pdict["p_id"] = "" else: - pdict['nlines'] = self.mda['number_of_lines'] - pdict['ncols'] = self.mda['number_of_columns'] - pdict['a_name'] = area_naming['area_id'] - pdict['a_desc'] = area_naming['description'] - pdict['p_id'] = "" + pdict["nlines"] = self.mda["number_of_lines"] + pdict["ncols"] = self.mda["number_of_columns"] + pdict["a_name"] = area_naming["area_id"] + pdict["a_desc"] = area_naming["description"] + pdict["p_id"] = "" area = get_area_definition(pdict, self.get_area_extent(dataset_id)) @@ -310,20 +310,20 @@ def get_area_extent(self, dsid): """Get the area extent.""" # following calculations assume grid origin is south-east corner # section 7.2.4 of MSG Level 1.5 Image Data Format 
Description - origins = {0: 'NW', 1: 'SW', 2: 'SE', 3: 'NE'} - grid_origin = self.mda['vis_ir_grid_origin'] + origins = {0: "NW", 1: "SW", 2: "SE", 3: "NE"} + grid_origin = self.mda["vis_ir_grid_origin"] grid_origin = int(grid_origin, 16) if grid_origin != 2: raise NotImplementedError( - 'Grid origin not supported number: {}, {} corner' + "Grid origin not supported number: {}, {} corner" .format(grid_origin, origins[grid_origin]) ) center_point = 3712 / 2 - column_step = self.mda['vis_ir_column_dir_grid_step'] + column_step = self.mda["vis_ir_column_dir_grid_step"] - line_step = self.mda['vis_ir_line_dir_grid_step'] + line_step = self.mda["vis_ir_line_dir_grid_step"] # check for Earth model as this affects the north-south and # west-east offsets @@ -337,7 +337,7 @@ def get_area_extent(self, dsid): we_offset = 0.5 # west +ve else: raise NotImplementedError( - 'unrecognised earth model: {}'.format(earth_model) + "unrecognised earth model: {}".format(earth_model) ) # section 3.1.5 of MSG Level 1.5 Image Data Format Description ll_c = (center_point - self.west - 0.5 + we_offset) * column_step @@ -349,7 +349,7 @@ def get_area_extent(self, dsid): return area_extent def _add_scanline_acq_time(self, dataset, dataset_id): - if dataset_id['name'] == 'HRV': + if dataset_id["name"] == "HRV": # TODO: Enable once HRV reading has been fixed. return # days, msecs = self._get_acq_time_hrv() @@ -359,16 +359,16 @@ def _add_scanline_acq_time(self, dataset, dataset_id): add_scanline_acq_time(dataset, acq_time) def _get_acq_time_hrv(self): - day_key = 'channel_data_hrv_data_l10_line_mean_acquisition_time_day' - msec_key = 'channel_data_hrv_data_l10_line_mean_acquisition_msec' + day_key = "channel_data_hrv_data_l10_line_mean_acquisition_time_day" + msec_key = "channel_data_hrv_data_l10_line_mean_acquisition_msec" days = self.nc[day_key].isel(channels_hrv_dim=0) msecs = self.nc[msec_key].isel(channels_hrv_dim=0) return days, msecs def _get_acq_time_visir(self, dataset_id): - band_idx = list(CHANNEL_NAMES.values()).index(dataset_id['name']) - day_key = 'channel_data_visir_data_l10_line_mean_acquisition_time_day' - msec_key = 'channel_data_visir_data_l10_line_mean_acquisition_msec' + band_idx = list(CHANNEL_NAMES.values()).index(dataset_id["name"]) + day_key = "channel_data_visir_data_l10_line_mean_acquisition_time_day" + msec_key = "channel_data_visir_data_l10_line_mean_acquisition_msec" days = self.nc[day_key].isel(channels_vis_ir_dim=band_idx) msecs = self.nc[msec_key].isel(channels_vis_ir_dim=band_idx) return days, msecs @@ -382,31 +382,31 @@ def satpos(self): Returns: Longitude [deg east], Latitude [deg north] and Altitude [m] """ start_times_poly = get_cds_time( - days=self.nc['orbit_polynomial_start_time_day'].values, - msecs=self.nc['orbit_polynomial_start_time_msec'].values + days=self.nc["orbit_polynomial_start_time_day"].values, + msecs=self.nc["orbit_polynomial_start_time_msec"].values ) end_times_poly = get_cds_time( - days=self.nc['orbit_polynomial_end_time_day'].values, - msecs=self.nc['orbit_polynomial_end_time_msec'].values + days=self.nc["orbit_polynomial_end_time_day"].values, + msecs=self.nc["orbit_polynomial_end_time_msec"].values ) orbit_polynomials = { - 'StartTime': np.array([start_times_poly]), - 'EndTime': np.array([end_times_poly]), - 'X': self.nc['orbit_polynomial_x'].values, - 'Y': self.nc['orbit_polynomial_y'].values, - 'Z': self.nc['orbit_polynomial_z'].values, + "StartTime": np.array([start_times_poly]), + "EndTime": np.array([end_times_poly]), + "X": 
self.nc["orbit_polynomial_x"].values, + "Y": self.nc["orbit_polynomial_y"].values, + "Z": self.nc["orbit_polynomial_z"].values, } poly_finder = OrbitPolynomialFinder(orbit_polynomials) orbit_polynomial = poly_finder.get_orbit_polynomial(self.start_time) return get_satpos( orbit_polynomial=orbit_polynomial, time=self.start_time, - semi_major_axis=self.mda['projection_parameters']['a'], - semi_minor_axis=self.mda['projection_parameters']['b'], + semi_major_axis=self.mda["projection_parameters"]["a"], + semi_minor_axis=self.mda["projection_parameters"]["b"], ) def _get_earth_model(self): - return int(self.nc.attrs['type_of_earth_model'], 16) + return int(self.nc.attrs["type_of_earth_model"], 16) class NCSEVIRIHRVFileHandler(NCSEVIRIFileHandler, SEVIRICalibrationHandler): diff --git a/satpy/readers/seviri_l2_bufr.py b/satpy/readers/seviri_l2_bufr.py index cb38f44acf..02aa0c2767 100644 --- a/satpy/readers/seviri_l2_bufr.py +++ b/satpy/readers/seviri_l2_bufr.py @@ -44,15 +44,15 @@ "Missing eccodes-python and/or eccodes C-library installation. Use conda to install eccodes") CHUNK_SIZE = get_legacy_chunk_size() -logger = logging.getLogger('SeviriL2Bufr') +logger = logging.getLogger("SeviriL2Bufr") -data_center_dict = {55: {'ssp': 'E0415', 'name': '08'}, 56: {'ssp': 'E0455', 'name': '09'}, - 57: {'ssp': 'E0095', 'name': '10'}, 70: {'ssp': 'E0000', 'name': '11'}} +data_center_dict = {55: {"ssp": "E0415", "name": "08"}, 56: {"ssp": "E0455", "name": "09"}, + 57: {"ssp": "E0095", "name": "10"}, 70: {"ssp": "E0000", "name": "11"}} -seg_size_dict = {'seviri_l2_bufr_asr': 16, 'seviri_l2_bufr_cla': 16, - 'seviri_l2_bufr_csr': 16, 'seviri_l2_bufr_gii': 3, - 'seviri_l2_bufr_thu': 16, 'seviri_l2_bufr_toz': 3, - 'seviri_l2_bufr_amv': 24} +seg_size_dict = {"seviri_l2_bufr_asr": 16, "seviri_l2_bufr_cla": 16, + "seviri_l2_bufr_csr": 16, "seviri_l2_bufr_gii": 3, + "seviri_l2_bufr_thu": 16, "seviri_l2_bufr_toz": 3, + "seviri_l2_bufr_amv": 24} class SeviriL2BufrFileHandler(BaseFileHandler): @@ -83,39 +83,39 @@ class SeviriL2BufrFileHandler(BaseFileHandler): """ def __init__(self, filename, filename_info, filetype_info, with_area_definition=False, - rectification_longitude='default', **kwargs): + rectification_longitude="default", **kwargs): """Initialise the file handler for SEVIRI L2 BUFR data.""" super(SeviriL2BufrFileHandler, self).__init__(filename, filename_info, filetype_info) - if ('server' in filename_info): + if ("server" in filename_info): # EUMETSAT Offline Bufr product self.mpef_header = self._read_mpef_header() else: # Product was retrieved from the EUMETSAT Data Center - timeStr = self.get_attribute('typicalDate')+self.get_attribute('typicalTime') + timeStr = self.get_attribute("typicalDate")+self.get_attribute("typicalTime") buf_start_time = datetime.strptime(timeStr, "%Y%m%d%H%M%S") - sc_id = self.get_attribute('satelliteIdentifier') + sc_id = self.get_attribute("satelliteIdentifier") self.mpef_header = {} - self.mpef_header['NominalTime'] = buf_start_time - self.mpef_header['SpacecraftName'] = data_center_dict[sc_id]['name'] - self.mpef_header['RectificationLongitude'] = data_center_dict[sc_id]['ssp'] + self.mpef_header["NominalTime"] = buf_start_time + self.mpef_header["SpacecraftName"] = data_center_dict[sc_id]["name"] + self.mpef_header["RectificationLongitude"] = data_center_dict[sc_id]["ssp"] - if rectification_longitude != 'default': - self.mpef_header['RectificationLongitude'] = f'E{int(rectification_longitude * 10):04d}' + if rectification_longitude != "default": + 
self.mpef_header["RectificationLongitude"] = f"E{int(rectification_longitude * 10):04d}" self.with_adef = with_area_definition - if self.with_adef and filetype_info['file_type'] == 'seviri_l2_bufr_amv': + if self.with_adef and filetype_info["file_type"] == "seviri_l2_bufr_amv": logging.warning("AMV BUFR data cannot be loaded with an area definition. Setting self.with_def = False.") self.with_adef = False - self.seg_size = seg_size_dict[filetype_info['file_type']] + self.seg_size = seg_size_dict[filetype_info["file_type"]] @property def start_time(self): """Return the repeat cycle start time.""" - return self.mpef_header['NominalTime'] + return self.mpef_header["NominalTime"] @property def end_time(self): @@ -125,13 +125,13 @@ def end_time(self): @property def platform_name(self): """Return spacecraft name.""" - return 'MET{}'.format(self.mpef_header['SpacecraftName']) + return "MET{}".format(self.mpef_header["SpacecraftName"]) @property def ssp_lon(self): """Return subsatellite point longitude.""" # e.g. E0415 - ssp_lon = self.mpef_header['RectificationLongitude'] + ssp_lon = self.mpef_header["RectificationLongitude"] return float(ssp_lon[1:])/10. def get_area_def(self, key): @@ -157,7 +157,7 @@ def get_attribute(self, key): bufr = ec.codes_bufr_new_from_file(fh) if bufr is None: break - ec.codes_set(bufr, 'unpack', 1) + ec.codes_set(bufr, "unpack", 1) attr = ec.codes_get(bufr, key) ec.codes_release(bufr) @@ -173,7 +173,7 @@ def get_array(self, key): if bufr is None: break - ec.codes_set(bufr, 'unpack', 1) + ec.codes_set(bufr, "unpack", 1) # if is the first message initialise our final array if (msgCount == 0): @@ -199,18 +199,18 @@ def get_dataset(self, dataset_id, dataset_info): and create the dataset with or without an AreaDefinition. """ - arr = self.get_array(dataset_info['key']) + arr = self.get_array(dataset_info["key"]) if self.with_adef: xarr = self.get_dataset_with_area_def(arr, dataset_id) # coordinates are not relevant when returning data with an AreaDefinition - if 'coordinates' in dataset_info.keys(): - del dataset_info['coordinates'] + if "coordinates" in dataset_info.keys(): + del dataset_info["coordinates"] else: xarr = xr.DataArray(arr, dims=["y"]) - if 'fill_value' in dataset_info: - xarr = xarr.where(xarr != dataset_info['fill_value']) + if "fill_value" in dataset_info: + xarr = xarr.where(xarr != dataset_info["fill_value"]) self._add_attributes(xarr, dataset_info) @@ -218,8 +218,8 @@ def get_dataset(self, dataset_id, dataset_info): def get_dataset_with_area_def(self, arr, dataset_id): """Get dataset with an AreaDefinition.""" - if dataset_id['name'] in ['latitude', 'longitude']: - self.__setattr__(dataset_id['name'], arr) + if dataset_id["name"] in ["latitude", "longitude"]: + self.__setattr__(dataset_id["name"], arr) xarr = xr.DataArray(arr, dims=["y"]) else: lons_1d, lats_1d, data_1d = da.compute(self.longitude, self.latitude, arr) @@ -231,13 +231,13 @@ def get_dataset_with_area_def(self, arr, dataset_id): data_2d[:] = np.nan data_2d[irow.compressed(), icol.compressed()] = data_1d[~irow.mask] - xarr = xr.DataArray(da.from_array(data_2d, CHUNK_SIZE), dims=('y', 'x')) + xarr = xr.DataArray(da.from_array(data_2d, CHUNK_SIZE), dims=("y", "x")) ntotal = len(icol) nvalid = len(icol.compressed()) if nvalid < ntotal: - logging.warning(f'{ntotal-nvalid} out of {ntotal} data points could not be put on ' - f'the grid {self._area_def.area_id}.') + logging.warning(f"{ntotal-nvalid} out of {ntotal} data points could not be put on " + f"the grid {self._area_def.area_id}.") return 
xarr @@ -248,31 +248,31 @@ def _construct_area_def(self, dataset_id): AreaDefinition: A pyresample AreaDefinition object containing the area definition. """ - res = dataset_id['resolution'] + res = dataset_id["resolution"] - area_naming_input_dict = {'platform_name': 'msg', - 'instrument_name': 'seviri', - 'resolution': res, + area_naming_input_dict = {"platform_name": "msg", + "instrument_name": "seviri", + "resolution": res, } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('seviri', self.ssp_lon)}) + **get_service_mode("seviri", self.ssp_lon)}) # Datasets with a segment size of 3 pixels extend outside the original SEVIRI 3km grid (with 1238 x 1238 # segments a 3 pixels). Hence, we need to use corresponding area defintions in areas.yaml if self.seg_size == 3: - area_naming['area_id'] += '_ext' - area_naming['description'] += ' (extended outside original 3km grid)' + area_naming["area_id"] += "_ext" + area_naming["description"] += " (extended outside original 3km grid)" # Construct AreaDefinition from standardized area definition in areas.yaml. - stand_area_def = get_area_def(area_naming['area_id']) + stand_area_def = get_area_def(area_naming["area_id"]) return stand_area_def def _add_attributes(self, xarr, dataset_info): """Add dataset attributes to xarray.""" - xarr.attrs['sensor'] = 'SEVIRI' - xarr.attrs['platform_name'] = self.platform_name - xarr.attrs['ssp_lon'] = self.ssp_lon - xarr.attrs['seg_size'] = self.seg_size + xarr.attrs["sensor"] = "SEVIRI" + xarr.attrs["platform_name"] = self.platform_name + xarr.attrs["ssp_lon"] = self.ssp_lon + xarr.attrs["seg_size"] = self.seg_size xarr.attrs.update(dataset_info) diff --git a/satpy/readers/seviri_l2_grib.py b/satpy/readers/seviri_l2_grib.py index 214193039d..b69c60e7ac 100644 --- a/satpy/readers/seviri_l2_grib.py +++ b/satpy/readers/seviri_l2_grib.py @@ -57,7 +57,7 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Return the sensing start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): @@ -66,8 +66,8 @@ def end_time(self): def get_area_def(self, dataset_id): """Return the area definition for a dataset.""" - self._area_dict['column_step'] = dataset_id["resolution"] - self._area_dict['line_step'] = dataset_id["resolution"] + self._area_dict["column_step"] = dataset_id["resolution"] + self._area_dict["line_step"] = dataset_id["resolution"] area_extent = calculate_area_extent(self._area_dict) @@ -86,12 +86,12 @@ def get_dataset(self, dataset_id, dataset_info): dimensions within a given message if the file was only partly read (not looping over all messages) in an earlier instance. 
""" - logger.debug('Reading in file to get dataset with parameter number %d.', - dataset_info['parameter_number']) + logger.debug("Reading in file to get dataset with parameter number %d.", + dataset_info["parameter_number"]) xarr = None message_found = False - with open(self.filename, 'rb') as fh: + with open(self.filename, "rb") as fh: # Iterate over all messages and fetch data when the correct parameter number is found while True: @@ -101,19 +101,19 @@ def get_dataset(self, dataset_id, dataset_info): if not message_found: # Could not obtain a valid message ID from the grib file logger.warning("Could not find parameter_number %d in GRIB file, no valid Dataset created", - dataset_info['parameter_number']) + dataset_info["parameter_number"]) break # Check if the parameter number in the GRIB message corresponds to the required key - parameter_number = self._get_from_msg(gid, 'parameterNumber') + parameter_number = self._get_from_msg(gid, "parameterNumber") - if parameter_number == dataset_info['parameter_number']: + if parameter_number == dataset_info["parameter_number"]: self._res = dataset_id["resolution"] self._read_attributes(gid) # Read the missing value - missing_value = self._get_from_msg(gid, 'missingValue') + missing_value = self._get_from_msg(gid, "missingValue") # Retrieve values and metadata from the GRIB message, masking the values equal to missing_value xarr = self._get_xarray_from_msg(gid) @@ -137,11 +137,11 @@ def get_dataset(self, dataset_id, dataset_info): def _read_attributes(self, gid): """Read the parameter attributes from the message and create the projection and area dictionaries.""" # Read SSP and date/time - self._ssp_lon = self._get_from_msg(gid, 'longitudeOfSubSatellitePointInDegrees') + self._ssp_lon = self._get_from_msg(gid, "longitudeOfSubSatellitePointInDegrees") # Read number of points on the x and y axes - self._nrows = self._get_from_msg(gid, 'Ny') - self._ncols = self._get_from_msg(gid, 'Nx') + self._nrows = self._get_from_msg(gid, "Ny") + self._ncols = self._get_from_msg(gid, "Nx") # Creates the projection and area dictionaries self._pdict, self._area_dict = self._get_proj_area(gid) @@ -151,6 +151,7 @@ def _get_proj_area(self, gid): Args: gid: The ID of the GRIB message. + Returns: tuple: A tuple of two dictionaries for the projection and the area definition. 
pdict: @@ -171,45 +172,45 @@ def _get_proj_area(self, gid): south: coodinate of the south limit """ # Get name of area definition - area_naming_input_dict = {'platform_name': 'msg', - 'instrument_name': 'seviri', - 'resolution': self._res, + area_naming_input_dict = {"platform_name": "msg", + "instrument_name": "seviri", + "resolution": self._res, } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode('seviri', self._ssp_lon)}) + **get_service_mode("seviri", self._ssp_lon)}) # Read all projection and area parameters from the message - earth_major_axis_in_meters = self._get_from_msg(gid, 'earthMajorAxis') * 1000.0 # [m] - earth_minor_axis_in_meters = self._get_from_msg(gid, 'earthMinorAxis') * 1000.0 # [m] + earth_major_axis_in_meters = self._get_from_msg(gid, "earthMajorAxis") * 1000.0 # [m] + earth_minor_axis_in_meters = self._get_from_msg(gid, "earthMinorAxis") * 1000.0 # [m] earth_major_axis_in_meters = self._scale_earth_axis(earth_major_axis_in_meters) earth_minor_axis_in_meters = self._scale_earth_axis(earth_minor_axis_in_meters) - nr_in_radius_of_earth = self._get_from_msg(gid, 'NrInRadiusOfEarth') - xp_in_grid_lengths = self._get_from_msg(gid, 'XpInGridLengths') + nr_in_radius_of_earth = self._get_from_msg(gid, "NrInRadiusOfEarth") + xp_in_grid_lengths = self._get_from_msg(gid, "XpInGridLengths") h_in_meters = earth_major_axis_in_meters * (nr_in_radius_of_earth - 1.0) # [m] # Create the dictionary with the projection data pdict = { - 'a': earth_major_axis_in_meters, - 'b': earth_minor_axis_in_meters, - 'h': h_in_meters, - 'ssp_lon': self._ssp_lon, - 'nlines': self._ncols, - 'ncols': self._nrows, - 'a_name': area_naming['area_id'], - 'a_desc': area_naming['description'], - 'p_id': "", + "a": earth_major_axis_in_meters, + "b": earth_minor_axis_in_meters, + "h": h_in_meters, + "ssp_lon": self._ssp_lon, + "nlines": self._ncols, + "ncols": self._nrows, + "a_name": area_naming["area_id"], + "a_desc": area_naming["description"], + "p_id": "", } # Compute the dictionary with the area extension area_dict = { - 'center_point': xp_in_grid_lengths, - 'north': self._nrows, - 'east': 1, - 'west': self._ncols, - 'south': 1, + "center_point": xp_in_grid_lengths, + "north": self._nrows, + "east": 1, + "west": self._ncols, + "south": 1, } return pdict, area_dict @@ -232,12 +233,13 @@ def _get_xarray_from_msg(self, gid): Args: gid: The ID of the GRIB message. + Returns: DataArray: The array containing the retrieved values. """ # Data from GRIB message are read into an Xarray... xarr = xr.DataArray(da.from_array(ec.codes_get_values( - gid).reshape(self._nrows, self._ncols), CHUNK_SIZE), dims=('y', 'x')) + gid).reshape(self._nrows, self._ncols), CHUNK_SIZE), dims=("y", "x")) return xarr @@ -251,13 +253,13 @@ def _get_attributes(self): platform_name: name of the platform """ orbital_parameters = { - 'projection_longitude': self._ssp_lon + "projection_longitude": self._ssp_lon } attributes = { - 'orbital_parameters': orbital_parameters, - 'sensor': 'seviri', - 'platform_name': PLATFORM_DICT[self.filename_info['spacecraft']] + "orbital_parameters": orbital_parameters, + "sensor": "seviri", + "platform_name": PLATFORM_DICT[self.filename_info["spacecraft"]] } return attributes @@ -268,6 +270,7 @@ def _get_from_msg(gid, key): Args: gid: The ID of the GRIB message. key: The key of the required attribute. + Returns: The retrieved attribute or None if the key is missing. 
""" diff --git a/satpy/readers/slstr_l1b.py b/satpy/readers/slstr_l1b.py index 5024d3d9a1..02aae9f72b 100644 --- a/satpy/readers/slstr_l1b.py +++ b/satpy/readers/slstr_l1b.py @@ -34,30 +34,30 @@ CHUNK_SIZE = get_legacy_chunk_size() -PLATFORM_NAMES = {'S3A': 'Sentinel-3A', - 'S3B': 'Sentinel-3B'} +PLATFORM_NAMES = {"S3A": "Sentinel-3A", + "S3B": "Sentinel-3B"} # These are the default channel adjustment factors. # Defined in the product notice: S3.PN-SLSTR-L1.08 # https://sentinel.esa.int/documents/247904/2731673/Sentinel-3A-and-3B-SLSTR-Product-Notice-Level-1B-SL-1-RBT-at-NRT-and-NTC.pdf -CHANCALIB_FACTORS = {'S1_nadir': 0.97, - 'S2_nadir': 0.98, - 'S3_nadir': 0.98, - 'S4_nadir': 1.0, - 'S5_nadir': 1.11, - 'S6_nadir': 1.13, - 'S7_nadir': 1.0, - 'S8_nadir': 1.0, - 'S9_nadir': 1.0, - 'S1_oblique': 0.94, - 'S2_oblique': 0.95, - 'S3_oblique': 0.95, - 'S4_oblique': 1.0, - 'S5_oblique': 1.04, - 'S6_oblique': 1.07, - 'S7_oblique': 1.0, - 'S8_oblique': 1.0, - 'S9_oblique': 1.0, } +CHANCALIB_FACTORS = {"S1_nadir": 0.97, + "S2_nadir": 0.98, + "S3_nadir": 0.98, + "S4_nadir": 1.0, + "S5_nadir": 1.11, + "S6_nadir": 1.13, + "S7_nadir": 1.0, + "S8_nadir": 1.0, + "S9_nadir": 1.0, + "S1_oblique": 0.94, + "S2_oblique": 0.95, + "S3_oblique": 0.95, + "S4_oblique": 1.0, + "S5_oblique": 1.04, + "S6_oblique": 1.07, + "S7_oblique": 1.0, + "S8_oblique": 1.0, + "S9_oblique": 1.0, } class NCSLSTRGeo(BaseFileHandler): @@ -70,17 +70,17 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) - self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'}) + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) + self.nc = self.nc.rename({"columns": "x", "rows": "y"}) self.cache = {} def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading %s.', key['name']) - file_key = info['file_key'].format(view=key['view'].name[0], - stripe=key['stripe'].name) + logger.debug("Reading %s.", key["name"]) + file_key = info["file_key"].format(view=key["view"].name[0], + stripe=key["stripe"].name) try: variable = self.nc[file_key] except KeyError: @@ -95,12 +95,12 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTR1B(BaseFileHandler): @@ -132,29 +132,29 @@ def __init__(self, filename, filename_info, filetype_info, self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) - self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'}) - self.channel = filename_info['dataset_name'] - self.stripe = filename_info['stripe'] - views = {'n': 'nadir', 'o': 'oblique'} - self.view = views[filename_info['view']] - cal_file = os.path.join(os.path.dirname(self.filename), 'viscal.nc') + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) + self.nc = self.nc.rename({"columns": "x", "rows": "y"}) + self.channel = filename_info["dataset_name"] + self.stripe = filename_info["stripe"] + views = {"n": "nadir", "o": "oblique"} + self.view = views[filename_info["view"]] + cal_file = 
os.path.join(os.path.dirname(self.filename), "viscal.nc") self.cal = xr.open_dataset(cal_file, decode_cf=True, mask_and_scale=True, - chunks={'views': CHUNK_SIZE}) + chunks={"views": CHUNK_SIZE}) indices_file = os.path.join(os.path.dirname(self.filename), - 'indices_{}{}.nc'.format(self.stripe, self.view[0])) + "indices_{}{}.nc".format(self.stripe, self.view[0])) self.indices = xr.open_dataset(indices_file, decode_cf=True, mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) - self.indices = self.indices.rename({'columns': 'x', 'rows': 'y'}) + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) + self.indices = self.indices.rename({"columns": "x", "rows": "y"}) - self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] - self.sensor = 'slstr' + self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]] + self.sensor = "slstr" if isinstance(user_calibration, dict): self.usercalib = user_calibration else: @@ -162,7 +162,7 @@ def __init__(self, filename, filename_info, filetype_info, def _apply_radiance_adjustment(self, radiances): """Adjust SLSTR radiances with default or user supplied values.""" - chan_name = self.channel + '_' + self.view + chan_name = self.channel + "_" + self.view adjust_fac = None if self.usercalib is not None: # If user supplied adjustment, use it. @@ -189,26 +189,26 @@ def _cal_rad(rad, didx, solar_flux=None): def get_dataset(self, key, info): """Load a dataset.""" - if (self.channel not in key['name'] or - self.stripe != key['stripe'].name or - self.view != key['view'].name): + if (self.channel not in key["name"] or + self.stripe != key["stripe"].name or + self.view != key["view"].name): return - logger.debug('Reading %s.', key['name']) - if key['calibration'] == 'brightness_temperature': - variable = self.nc['{}_BT_{}{}'.format(self.channel, self.stripe, self.view[0])] + logger.debug("Reading %s.", key["name"]) + if key["calibration"] == "brightness_temperature": + variable = self.nc["{}_BT_{}{}".format(self.channel, self.stripe, self.view[0])] else: - variable = self.nc['{}_radiance_{}{}'.format(self.channel, self.stripe, self.view[0])] + variable = self.nc["{}_radiance_{}{}".format(self.channel, self.stripe, self.view[0])] radiances = self._apply_radiance_adjustment(variable) - units = variable.attrs['units'] - if key['calibration'] == 'reflectance': + units = variable.attrs["units"] + if key["calibration"] == "reflectance": # TODO take into account sun-earth distance - solar_flux = self.cal[re.sub('_[^_]*$', '', key['name']) + '_solar_irradiances'] - d_index = self.indices['detector_{}{}'.format(self.stripe, self.view[0])] - idx = 0 if self.view[0] == 'n' else 1 # 0: Nadir view, 1: oblique (check). + solar_flux = self.cal[re.sub("_[^_]*$", "", key["name"]) + "_solar_irradiances"] + d_index = self.indices["detector_{}{}".format(self.stripe, self.view[0])] + idx = 0 if self.view[0] == "n" else 1 # 0: Nadir view, 1: oblique (check). 
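Context for the reflectance branch in this hunk: the surrounding code looks up a per-detector solar irradiance, maps each pixel to its detector index, and then scales the result by pi * 100 so the units become percent. The body of _cal_rad is not shown in this diff, so the per-detector division below is an assumption; names here are hypothetical and this is only a minimal NumPy sketch, not the handler's implementation (which, as the diff shows, applies the function per dask block via da.map_blocks).

import numpy as np

def radiance_to_reflectance(radiance, detector_index, solar_irradiance):
    """Sketch of a radiance -> percent-reflectance conversion.

    radiance: 2-D array of channel radiances
    detector_index: 2-D integer array mapping each pixel to a detector
    solar_irradiance: 1-D array of per-detector solar irradiances for one view
    """
    per_pixel_flux = solar_irradiance[detector_index]  # irradiance of the detector that saw each pixel
    return radiance / per_pixel_flux * np.pi * 100.0   # scale to percent

if __name__ == "__main__":
    rad = np.array([[10.0, 12.0], [11.0, 9.0]])
    didx = np.array([[0, 1], [1, 0]])
    flux = np.array([1500.0, 1480.0])
    print(radiance_to_reflectance(rad, didx, flux))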
radiances.data = da.map_blocks( self._cal_rad, radiances.data, d_index.data, solar_flux=solar_flux[:, idx].values) radiances *= np.pi * 100 - units = '%' + units = "%" info = info.copy() info.update(radiances.attrs) @@ -224,12 +224,12 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTRAngles(BaseFileHandler): @@ -240,8 +240,8 @@ def _loadcart(self, fname): cartf = xr.open_dataset(fname, decode_cf=True, mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) return cartf def __init__(self, filename, filename_info, filetype_info): @@ -252,57 +252,57 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) # TODO: get metadata from the manifest file (xfdumanifest.xml) - self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] - self.sensor = 'slstr' - self.view = filename_info['view'] - self._start_time = filename_info['start_time'] - self._end_time = filename_info['end_time'] + self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]] + self.sensor = "slstr" + self.view = filename_info["view"] + self._start_time = filename_info["start_time"] + self._end_time = filename_info["end_time"] carta_file = os.path.join( - os.path.dirname(self.filename), 'cartesian_a{}.nc'.format(self.view[0])) + os.path.dirname(self.filename), "cartesian_a{}.nc".format(self.view[0])) carti_file = os.path.join( - os.path.dirname(self.filename), 'cartesian_i{}.nc'.format(self.view[0])) + os.path.dirname(self.filename), "cartesian_i{}.nc".format(self.view[0])) cartx_file = os.path.join( - os.path.dirname(self.filename), 'cartesian_tx.nc') + os.path.dirname(self.filename), "cartesian_tx.nc") self.carta = self._loadcart(carta_file) self.carti = self._loadcart(carti_file) self.cartx = self._loadcart(cartx_file) def get_dataset(self, key, info): """Load a dataset.""" - if not key['view'].name.startswith(self.view[0]): + if not key["view"].name.startswith(self.view[0]): return - logger.debug('Reading %s.', key['name']) + logger.debug("Reading %s.", key["name"]) # Check if file_key is specified in the yaml - file_key = info['file_key'].format(view=key['view'].name[0]) + file_key = info["file_key"].format(view=key["view"].name[0]) variable = self.nc[file_key] - l_step = self.nc.attrs.get('al_subsampling_factor', 1) - c_step = self.nc.attrs.get('ac_subsampling_factor', 16) + l_step = self.nc.attrs.get("al_subsampling_factor", 1) + c_step = self.nc.attrs.get("ac_subsampling_factor", 16) - if key.get('resolution', 1000) == 500: + if key.get("resolution", 1000) == 500: l_step *= 2 c_step *= 2 if c_step != 1 or l_step != 1: - logger.debug('Interpolating %s.', key['name']) + logger.debug("Interpolating %s.", key["name"]) # TODO: do it in cartesian coordinates ! 
pbs at date line and # possible - tie_x = self.cartx['x_tx'].data[0, :][::-1] - tie_y = self.cartx['y_tx'].data[:, 0] - if key.get('resolution', 1000) == 500: - full_x = self.carta['x_a' + self.view[0]].data - full_y = self.carta['y_a' + self.view[0]].data + tie_x = self.cartx["x_tx"].data[0, :][::-1] + tie_y = self.cartx["y_tx"].data[:, 0] + if key.get("resolution", 1000) == 500: + full_x = self.carta["x_a" + self.view[0]].data + full_y = self.carta["y_a" + self.view[0]].data else: - full_x = self.carti['x_i' + self.view[0]].data - full_y = self.carti['y_i' + self.view[0]].data + full_x = self.carti["x_i" + self.view[0]].data + full_y = self.carti["y_i" + self.view[0]].data variable = variable.fillna(0) - variable.attrs['resolution'] = key.get('resolution', 1000) + variable.attrs["resolution"] = key.get("resolution", 1000) from scipy.interpolate import RectBivariateSpline spl = RectBivariateSpline( @@ -311,13 +311,13 @@ def get_dataset(self, key, info): values = spl.ev(full_y, full_x) variable = xr.DataArray(da.from_array(values, chunks=(CHUNK_SIZE, CHUNK_SIZE)), - dims=['y', 'x'], attrs=variable.attrs) + dims=["y", "x"], attrs=variable.attrs) - variable.attrs['platform_name'] = self.platform_name - variable.attrs['sensor'] = self.sensor + variable.attrs["platform_name"] = self.platform_name + variable.attrs["sensor"] = self.sensor - if 'units' not in variable.attrs: - variable.attrs['units'] = 'degrees' + if "units" not in variable.attrs: + variable.attrs["units"] = "degrees" variable.attrs.update(key.to_dict()) @@ -326,12 +326,12 @@ def get_dataset(self, key, info): @property def start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") class NCSLSTRFlag(BaseFileHandler): @@ -344,24 +344,24 @@ def __init__(self, filename, filename_info, filetype_info): self.nc = xr.open_dataset(self.filename, decode_cf=True, mask_and_scale=True, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) - self.nc = self.nc.rename({'columns': 'x', 'rows': 'y'}) - self.stripe = filename_info['stripe'] - views = {'n': 'nadir', 'o': 'oblique'} - self.view = views[filename_info['view']] + chunks={"columns": CHUNK_SIZE, + "rows": CHUNK_SIZE}) + self.nc = self.nc.rename({"columns": "x", "rows": "y"}) + self.stripe = filename_info["stripe"] + views = {"n": "nadir", "o": "oblique"} + self.view = views[filename_info["view"]] # TODO: get metadata from the manifest file (xfdumanifest.xml) - self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] - self.sensor = 'slstr' + self.platform_name = PLATFORM_NAMES[filename_info["mission_id"]] + self.sensor = "slstr" def get_dataset(self, key, info): """Load a dataset.""" - if (self.stripe != key['stripe'].name or - self.view != key['view'].name): + if (self.stripe != key["stripe"].name or + self.view != key["view"].name): return - logger.debug('Reading %s.', key['name']) - file_key = info['file_key'].format(view=key['view'].name[0], - stripe=key['stripe'].name) + logger.debug("Reading %s.", key["name"]) + file_key = info["file_key"].format(view=key["view"].name[0], + stripe=key["stripe"].name) variable = self.nc[file_key] info = info.copy() @@ -376,9 +376,9 @@ def get_dataset(self, key, info): @property def 
start_time(self): """Get the start time.""" - return datetime.strptime(self.nc.attrs['start_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["start_time"], "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get the end time.""" - return datetime.strptime(self.nc.attrs['stop_time'], '%Y-%m-%dT%H:%M:%S.%fZ') + return datetime.strptime(self.nc.attrs["stop_time"], "%Y-%m-%dT%H:%M:%S.%fZ") diff --git a/satpy/readers/smos_l2_wind.py b/satpy/readers/smos_l2_wind.py index 116ac39756..c982397c3c 100644 --- a/satpy/readers/smos_l2_wind.py +++ b/satpy/readers/smos_l2_wind.py @@ -41,22 +41,22 @@ class SMOSL2WINDFileHandler(NetCDF4FileHandler): @property def start_time(self): """Get start time.""" - return datetime.strptime(self['/attr/time_coverage_start'], "%Y-%m-%dT%H:%M:%S Z") + return datetime.strptime(self["/attr/time_coverage_start"], "%Y-%m-%dT%H:%M:%S Z") @property def end_time(self): """Get end time.""" - return datetime.strptime(self['/attr/time_coverage_end'], "%Y-%m-%dT%H:%M:%S Z") + return datetime.strptime(self["/attr/time_coverage_end"], "%Y-%m-%dT%H:%M:%S Z") @property def platform_shortname(self): """Get platform shortname.""" - return self.filename_info['platform_shortname'] + return self.filename_info["platform_shortname"] @property def platform_name(self): """Get platform.""" - return self['/attr/platform'] + return self["/attr/platform"] def get_metadata(self, data, ds_info): """Get metadata.""" @@ -64,12 +64,12 @@ def get_metadata(self, data, ds_info): metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ - 'platform_shortname': self.platform_shortname, - 'platform_name': self.platform_name, - 'sensor': self['/attr/instrument'], - 'start_time': self.start_time, - 'end_time': self.end_time, - 'level': self['/attr/processing_level'], + "platform_shortname": self.platform_shortname, + "platform_name": self.platform_name, + "sensor": self["/attr/instrument"], + "start_time": self.start_time, + "end_time": self.end_time, + "level": self["/attr/processing_level"], }) return metadata @@ -88,16 +88,16 @@ def available_datasets(self, configured_datasets=None): continue handled_variables.add(var_name) new_info = { - 'name': var_name, - 'file_type': self.filetype_info['file_type'], + "name": var_name, + "file_type": self.filetype_info["file_type"], } yield True, new_info def _mask_dataset(self, data): """Mask out fill values.""" try: - fill = data.attrs['_FillValue'] - data.attrs['_FillValue'] = np.nan + fill = data.attrs["_FillValue"] + data.attrs["_FillValue"] = np.nan return data.where(data != fill) except KeyError: return data @@ -110,11 +110,11 @@ def _adjust_lon_coord(self, data): def _rename_coords(self, data): """Rename coords.""" rename_dict = {} - if 'lon' in data.dims: + if "lon" in data.dims: data = self._adjust_lon_coord(data) - rename_dict['lon'] = 'x' - if 'lat' in data.dims: - rename_dict['lat'] = 'y' + rename_dict["lon"] = "x" + if "lat" in data.dims: + rename_dict["lat"] = "y" # Rename the coordinates to x and y return data.rename(rename_dict) @@ -123,39 +123,39 @@ def _remove_time_coordinate(self, data): # Remove dimension where size is 1, eg. 
time data = data.squeeze() # Remove if exists time as coordinate - if 'time' in data.coords: - data = data.drop_vars('time') + if "time" in data.coords: + data = data.drop_vars("time") return data def _roll_dataset_lon_coord(self, data): """Roll dataset along the lon coordinate.""" - if 'lon' in data.dims: + if "lon" in data.dims: data = data.roll(lon=720, roll_coords=True) return data def get_dataset(self, ds_id, ds_info): """Get dataset.""" - data = self[ds_id['name']] + data = self[ds_id["name"]] data.attrs = self.get_metadata(data, ds_info) data = self._remove_time_coordinate(data) data = self._roll_dataset_lon_coord(data) data = self._rename_coords(data) data = self._mask_dataset(data) - if len(data.dims) >= 2 and all([dim in data.dims for dim in ['x', 'y']]): + if len(data.dims) >= 2 and all([dim in data.dims for dim in ["x", "y"]]): # Remove the first and last row as these values extends beyond +-90 latitude # if the dataset contains the y dimmension. # As this is data over open sea these has no values. data = data.where((data.y > -90.0) & (data.y < 90.0), drop=True) - elif len(data.dims) == 1 and 'y' in data.dims: + elif len(data.dims) == 1 and "y" in data.dims: data = data.where((data.y > 0) & (data.y < len(data.y) - 1), drop=True) return data def _create_area_extent(self, width, height): """Create area extent.""" # Creating a meshgrid, not needed actually, but makes it easy to find extremes - _lon = self._adjust_lon_coord(self['lon']) + _lon = self._adjust_lon_coord(self["lon"]) _lon = self._roll_dataset_lon_coord(_lon) - latlon = np.meshgrid(_lon, self['lat'][1:self['lat/shape'][0] - 1]) + latlon = np.meshgrid(_lon, self["lat"][1:self["lat/shape"][0] - 1]) lower_left_x = latlon[0][height - 1][0] - 0.125 lower_left_y = latlon[1][height - 1][0] + 0.125 upper_right_y = latlon[1][1][width - 1] - 0.125 @@ -164,12 +164,12 @@ def _create_area_extent(self, width, height): def get_area_def(self, dsid): """Define AreaDefintion.""" - width = self['lon/shape'][0] - height = self['lat/shape'][0] - 2 + width = self["lon/shape"][0] + height = self["lat/shape"][0] - 2 area_extent = self._create_area_extent(width, height) description = "SMOS L2 Wind Equirectangular Projection" - area_id = 'smos_eqc' - proj_id = 'equirectangular' - proj_dict = {'init': self['/attr/geospatial_bounds_vertical_crs']} + area_id = "smos_eqc" + proj_id = "equirectangular" + proj_dict = {"init": self["/attr/geospatial_bounds_vertical_crs"]} area_def = AreaDefinition(area_id, description, proj_id, proj_dict, width, height, area_extent, ) return area_def diff --git a/satpy/readers/tropomi_l2.py b/satpy/readers/tropomi_l2.py index c6dda4bd89..768ca70948 100644 --- a/satpy/readers/tropomi_l2.py +++ b/satpy/readers/tropomi_l2.py @@ -40,7 +40,7 @@ from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) -DATE_FMT = '%Y-%m-%dT%H:%M:%SZ' +DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" CHUNK_SIZE = get_legacy_chunk_size() @@ -50,32 +50,32 @@ class TROPOMIL2FileHandler(NetCDF4FileHandler): @property def start_time(self): """Get start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def platform_shortname(self): """Get platform shortname.""" - return self.filename_info['platform_shortname'] + return self.filename_info["platform_shortname"] @property def time_coverage_start(self): """Get 
time_coverage_start.""" - return datetime.strptime(self['/attr/time_coverage_start'], DATE_FMT) + return datetime.strptime(self["/attr/time_coverage_start"], DATE_FMT) @property def time_coverage_end(self): """Get time_coverage_end.""" - return datetime.strptime(self['/attr/time_coverage_end'], DATE_FMT) + return datetime.strptime(self["/attr/time_coverage_end"], DATE_FMT) @property def sensor(self): """Get sensor.""" - res = self['/attr/sensor'] + res = self["/attr/sensor"] if isinstance(res, np.ndarray): return str(res.astype(str)).lower() return res.lower() @@ -93,7 +93,7 @@ def available_datasets(self, configured_datasets=None): lat_shape = None for var_name, _val in self.file_content.items(): # Could probably avoid this hardcoding, will think on it - if (var_name == 'PRODUCT/latitude'): + if (var_name == "PRODUCT/latitude"): lat_shape = self[var_name + "/shape"] break @@ -102,19 +102,19 @@ def available_datasets(self, configured_datasets=None): # update previously configured datasets logger.debug("Starting previously configured variables loop...") # if bounds exists, we can assemble them later - bounds_exist = 'latitude_bounds' in self and 'longitude_bounds' in self + bounds_exist = "latitude_bounds" in self and "longitude_bounds" in self for is_avail, ds_info in (configured_datasets or []): # some other file handler knows how to load this if is_avail is not None: yield is_avail, ds_info - var_name = ds_info.get('file_key', ds_info['name']) + var_name = ds_info.get("file_key", ds_info["name"]) # logger.debug("Evaluating previously configured variable: %s", var_name) - matches = self.file_type_matches(ds_info['file_type']) + matches = self.file_type_matches(ds_info["file_type"]) # we can confidently say that we can provide this dataset and can # provide more info - assembled = var_name in ['assembled_lat_bounds', 'assembled_lon_bounds'] + assembled = var_name in ["assembled_lat_bounds", "assembled_lon_bounds"] if (matches and var_name in self) or (assembled and bounds_exist): logger.debug("Handling previously configured variable: %s", var_name) if not assembled: @@ -150,20 +150,20 @@ def _iterate_over_dataset_contents(self, handled_variables, shape): logger.debug("Already handled, skipping: %s", var_name) continue handled_variables.add(var_name) - last_index_separator = var_name.rindex('/') + last_index_separator = var_name.rindex("/") last_index_separator = last_index_separator + 1 var_name_no_path = var_name[last_index_separator:] logger.debug("Using short name of: %s", var_name_no_path) # Create new ds_info object - if var_name_no_path in ['latitude_bounds', 'longitude_bounds']: + if var_name_no_path in ["latitude_bounds", "longitude_bounds"]: coordinates = [] else: - coordinates = ['longitude', 'latitude'] + coordinates = ["longitude", "latitude"] new_info = { - 'name': var_name_no_path, - 'file_key': var_name, - 'coordinates': coordinates, - 'file_type': self.filetype_info['file_type'], + "name": var_name_no_path, + "file_key": var_name, + "coordinates": coordinates, + "file_type": self.filetype_info["file_type"], } yield True, new_info @@ -173,12 +173,12 @@ def get_metadata(self, data, ds_info): metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ - 'platform_shortname': self.platform_shortname, - 'sensor': self.sensor, - 'start_time': self.start_time, - 'end_time': self.end_time, - 'time_coverage_start': self.time_coverage_start, - 'time_coverage_end': self.time_coverage_end, + "platform_shortname": self.platform_shortname, + "sensor": self.sensor, + 
"start_time": self.start_time, + "end_time": self.end_time, + "time_coverage_start": self.time_coverage_start, + "time_coverage_end": self.time_coverage_end, }) return metadata @@ -186,10 +186,10 @@ def get_metadata(self, data, ds_info): def _rename_dims(self, data_arr): """Normalize dimension names with the rest of Satpy.""" dims_dict = {} - if 'ground_pixel' in data_arr.dims: - dims_dict['ground_pixel'] = 'x' - if 'scanline' in data_arr.dims: - dims_dict['scanline'] = 'y' + if "ground_pixel" in data_arr.dims: + dims_dict["ground_pixel"] = "x" + if "scanline" in data_arr.dims: + dims_dict["scanline"] = "y" return data_arr.rename(dims_dict) def prepare_geo(self, bounds_data): @@ -220,18 +220,18 @@ def prepare_geo(self, bounds_data): # Convert to DataArray dask_dest = da.from_array(dest, chunks=CHUNK_SIZE) dest = xr.DataArray(dask_dest, - dims=('y_bounds', 'x_bounds'), + dims=("y_bounds", "x_bounds"), attrs=bounds_data.attrs ) return dest def get_dataset(self, ds_id, ds_info): """Get dataset.""" - logger.debug("Getting data for: %s", ds_id['name']) - file_key = ds_info.get('file_key', ds_id['name']) + logger.debug("Getting data for: %s", ds_id["name"]) + file_key = ds_info.get("file_key", ds_id["name"]) data = self[file_key] data.attrs = self.get_metadata(data, ds_info) - fill_value = data.attrs.get('_FillValue', np.float32(np.nan)) + fill_value = data.attrs.get("_FillValue", np.float32(np.nan)) data = data.squeeze() # preserve integer data types if possible @@ -239,11 +239,11 @@ def get_dataset(self, ds_id, ds_info): new_fill = fill_value else: new_fill = np.float32(np.nan) - data.attrs.pop('_FillValue', None) + data.attrs.pop("_FillValue", None) good_mask = data != fill_value - scale_factor = data.attrs.get('scale_factor') - add_offset = data.attrs.get('add_offset') + scale_factor = data.attrs.get("scale_factor") + add_offset = data.attrs.get("add_offset") if scale_factor is not None: data = data * scale_factor + add_offset @@ -251,11 +251,11 @@ def get_dataset(self, ds_id, ds_info): data = self._rename_dims(data) # drop coords whose units are not meters - drop_list = ['y', 'x', 'layer', 'vertices'] + drop_list = ["y", "x", "layer", "vertices"] coords_exist = [coord for coord in drop_list if coord in data.coords] if coords_exist: data = data.drop_vars(coords_exist) - if ds_id['name'] in ['assembled_lat_bounds', 'assembled_lon_bounds']: + if ds_id["name"] in ["assembled_lat_bounds", "assembled_lon_bounds"]: data = self.prepare_geo(data) return data diff --git a/satpy/readers/utils.py b/satpy/readers/utils.py index e2035af479..18c7193a43 100644 --- a/satpy/readers/utils.py +++ b/satpy/readers/utils.py @@ -53,7 +53,7 @@ def np2str(value): type `numpy.string_` or it is not a numpy array """ - if hasattr(value, 'dtype') and \ + if hasattr(value, "dtype") and \ issubclass(value.dtype.type, (np.str_, np.bytes_, np.object_)) \ and value.size == 1: value = value.item() @@ -68,13 +68,13 @@ def np2str(value): def _get_geostationary_height(geos_area): params = geos_area.crs.coordinate_operation.params - h_param = [p for p in params if 'satellite height' in p.name.lower()][0] + h_param = [p for p in params if "satellite height" in p.name.lower()][0] return h_param.value def _get_geostationary_reference_longitude(geos_area): params = geos_area.crs.coordinate_operation.params - lon_0_params = [p for p in params if 'longitude of natural origin' in p.name.lower()] + lon_0_params = [p for p in params if "longitude of natural origin" in p.name.lower()] if not lon_0_params: return 0 elif len(lon_0_params) != 
1: @@ -232,7 +232,7 @@ def _unzip_local_file(filename: str, prefix=None): Temporary filename path for decompressed file or None. """ - if not os.fspath(filename).endswith('bz2'): + if not os.fspath(filename).endswith("bz2"): return None fdn, tmpfilepath = tempfile.mkstemp(prefix=prefix, dir=config["tmp_dir"]) @@ -248,19 +248,19 @@ def _unzip_local_file(filename: str, prefix=None): def _unzip_with_pbzip(filename, tmpfilepath, fdn): # try pbzip2 - pbzip = which('pbzip2') + pbzip = which("pbzip2") if pbzip is None: return None # Run external pbzip2 - n_thr = os.environ.get('OMP_NUM_THREADS') + n_thr = os.environ.get("OMP_NUM_THREADS") if n_thr: runner = [pbzip, - '-dc', - '-p'+str(n_thr), + "-dc", + "-p"+str(n_thr), filename] else: runner = [pbzip, - '-dc', + "-dc", filename] p = Popen(runner, stdout=PIPE, stderr=PIPE) # nosec stdout = BytesIO(p.communicate()[0]) @@ -268,7 +268,7 @@ def _unzip_with_pbzip(filename, tmpfilepath, fdn): if status != 0: raise IOError("pbzip2 error '%s', failed, status=%d" % (filename, status)) - with closing(os.fdopen(fdn, 'wb')) as ofpt: + with closing(os.fdopen(fdn, "wb")) as ofpt: try: stdout.seek(0) shutil.copyfileobj(stdout, ofpt) @@ -291,7 +291,7 @@ def _unzip_with_bz2(filename, tmpfilepath): def _write_uncompressed_file(content, fdn, filename, tmpfilepath): - with closing(os.fdopen(fdn, 'wb')) as ofpt: + with closing(os.fdopen(fdn, "wb")) as ofpt: try: ofpt.write(content) except IOError: @@ -348,7 +348,7 @@ def generic_open(filename, *args, **kwargs): Returns a file-like object. """ - if os.fspath(filename).endswith('.bz2'): + if os.fspath(filename).endswith(".bz2"): fp = bz2.open(filename, *args, **kwargs) else: try: @@ -413,8 +413,8 @@ def get_user_calibration_factors(band_name, correction_dict): """Retrieve radiance correction factors from user-supplied dict.""" if band_name in correction_dict: try: - slope = correction_dict[band_name]['slope'] - offset = correction_dict[band_name]['offset'] + slope = correction_dict[band_name]["slope"] + offset = correction_dict[band_name]["offset"] except KeyError: raise KeyError("Incorrect correction factor dictionary. 
You must " "supply 'slope' and 'offset' keys.") @@ -440,13 +440,13 @@ def get_array_date(scn_data, utc_date=None): """Get start time from a channel data array.""" if utc_date is None: try: - utc_date = scn_data.attrs['start_time'] + utc_date = scn_data.attrs["start_time"] except KeyError: try: - utc_date = scn_data.attrs['scheduled_time'] + utc_date = scn_data.attrs["scheduled_time"] except KeyError: - raise KeyError('Scene has no start_time ' - 'or scheduled_time attribute.') + raise KeyError("Scene has no start_time " + "or scheduled_time attribute.") return utc_date @@ -456,8 +456,8 @@ def apply_earthsun_distance_correction(reflectance, utc_date=None): utc_date = get_array_date(reflectance, utc_date) sun_earth_dist = sun_earth_distance_correction(utc_date) - reflectance.attrs['sun_earth_distance_correction_applied'] = True - reflectance.attrs['sun_earth_distance_correction_factor'] = sun_earth_dist + reflectance.attrs["sun_earth_distance_correction_applied"] = True + reflectance.attrs["sun_earth_distance_correction_factor"] = sun_earth_dist with xr.set_options(keep_attrs=True): reflectance = reflectance * sun_earth_dist * sun_earth_dist return reflectance @@ -469,8 +469,8 @@ def remove_earthsun_distance_correction(reflectance, utc_date=None): utc_date = get_array_date(reflectance, utc_date) sun_earth_dist = sun_earth_distance_correction(utc_date) - reflectance.attrs['sun_earth_distance_correction_applied'] = False - reflectance.attrs['sun_earth_distance_correction_factor'] = sun_earth_dist + reflectance.attrs["sun_earth_distance_correction_applied"] = False + reflectance.attrs["sun_earth_distance_correction_factor"] = sun_earth_dist with xr.set_options(keep_attrs=True): reflectance = reflectance / (sun_earth_dist * sun_earth_dist) return reflectance diff --git a/satpy/readers/vaisala_gld360.py b/satpy/readers/vaisala_gld360.py index 735eb6b3c9..b0bde01573 100644 --- a/satpy/readers/vaisala_gld360.py +++ b/satpy/readers/vaisala_gld360.py @@ -48,11 +48,11 @@ def __init__(self, filename, filename_info, filetype_info): """Initialize VaisalaGLD360TextFileHandler.""" super(VaisalaGLD360TextFileHandler, self).__init__(filename, filename_info, filetype_info) - names = ['gld360_date', 'gld360_time', 'latitude', 'longitude', 'power', 'unit'] - types = ['str', 'str', 'float', 'float', 'float', 'str'] + names = ["gld360_date", "gld360_time", "latitude", "longitude", "power", "unit"] + types = ["str", "str", "float", "float", "float", "str"] dtypes = dict(zip(names, types)) # Combine 'date' and 'time' into a datetime object - parse_dates = {'time': ['gld360_date', 'gld360_time']} + parse_dates = {"time": ["gld360_date", "gld360_time"]} self.data = pd.read_csv(filename, delim_whitespace=True, header=None, names=names, dtype=dtypes, parse_dates=parse_dates) @@ -60,28 +60,28 @@ def __init__(self, filename, filename_info, filetype_info): @property def start_time(self): """Get start time.""" - return self.data['time'].iloc[0] + return self.data["time"].iloc[0] @property def end_time(self): """Get end time.""" - return self.data['time'].iloc[-1] + return self.data["time"].iloc[-1] def get_dataset(self, dataset_id, dataset_info): """Load a dataset.""" - xarr = xr.DataArray(da.from_array(self.data[dataset_id['name']], + xarr = xr.DataArray(da.from_array(self.data[dataset_id["name"]], chunks=CHUNK_SIZE), dims=["y"]) # Add time, longitude, and latitude as non-dimensional y-coordinates - xarr['time'] = ('y', self.data['time']) - xarr['longitude'] = ('y', self.data['longitude']) - xarr['latitude'] = ('y', 
self.data['latitude']) + xarr["time"] = ("y", self.data["time"]) + xarr["longitude"] = ("y", self.data["longitude"]) + xarr["latitude"] = ("y", self.data["latitude"]) - if dataset_id['name'] == 'power': + if dataset_id["name"] == "power": # Check that units in the file match the unit specified in the # reader yaml-file - if not (self.data.unit == dataset_info['units']).all(): - raise ValueError('Inconsistent units found in file!') + if not (self.data.unit == dataset_info["units"]).all(): + raise ValueError("Inconsistent units found in file!") xarr.attrs.update(dataset_info) return xarr diff --git a/satpy/readers/vii_base_nc.py b/satpy/readers/vii_base_nc.py index e51024ba56..83056189dc 100644 --- a/satpy/readers/vii_base_nc.py +++ b/satpy/readers/vii_base_nc.py @@ -46,14 +46,14 @@ def __init__(self, filename, filename_info, filetype_info, orthorect=False): super().__init__(filename, filename_info, filetype_info, auto_maskandscale=True) # Saves the orthorectification flag - self.orthorect = orthorect and filetype_info.get('orthorect', True) + self.orthorect = orthorect and filetype_info.get("orthorect", True) # Saves the interpolation flag - self.interpolate = filetype_info.get('interpolate', True) + self.interpolate = filetype_info.get("interpolate", True) try: - longitude = self[filetype_info['cached_longitude']] - latitude = self[filetype_info['cached_latitude']] + longitude = self[filetype_info["cached_longitude"]] + latitude = self[filetype_info["cached_latitude"]] if self.interpolate: self.longitude, self.latitude = self._perform_geo_interpolation(longitude, latitude) @@ -66,22 +66,22 @@ def __init__(self, filename, filename_info, filetype_info, orthorect=False): def _standardize_dims(self, variable): """Standardize dims to y, x.""" - if 'num_pixels' in variable.dims: - variable = variable.rename({'num_pixels': 'x', 'num_lines': 'y'}) - if 'num_points_act' in variable.dims: - variable = variable.rename({'num_points_act': 'x', 'num_points_alt': 'y'}) - if variable.dims[0] == 'x': - variable = variable.transpose('y', 'x') + if "num_pixels" in variable.dims: + variable = variable.rename({"num_pixels": "x", "num_lines": "y"}) + if "num_points_act" in variable.dims: + variable = variable.rename({"num_points_act": "x", "num_points_alt": "y"}) + if variable.dims[0] == "x": + variable = variable.transpose("y", "x") return variable def get_dataset(self, dataset_id, dataset_info): """Get dataset using file_key in dataset_info.""" - var_key = dataset_info['file_key'] - logger.debug('Reading in file to get dataset with key %s.', var_key) + var_key = dataset_info["file_key"] + logger.debug("Reading in file to get dataset with key %s.", var_key) - if var_key == 'cached_longitude' and self.longitude is not None: + if var_key == "cached_longitude" and self.longitude is not None: variable = self.longitude.copy() - elif var_key == 'cached_latitude' and self.latitude is not None: + elif var_key == "cached_latitude" and self.latitude is not None: variable = self.latitude.copy() else: try: @@ -91,21 +91,21 @@ def get_dataset(self, dataset_id, dataset_info): return None # If the dataset is marked for interpolation, perform the interpolation from tie points to pixels - if dataset_info.get('interpolate', False) and self.interpolate: + if dataset_info.get("interpolate", False) and self.interpolate: variable = self._perform_interpolation(variable) # Perform the calibration if required - if dataset_info.get('calibration') is not None: + if dataset_info.get("calibration") is not None: variable = 
self._perform_calibration(variable, dataset_info) # Perform the orthorectification if required if self.orthorect: - orthorect_data_name = dataset_info.get('orthorect_data', None) + orthorect_data_name = dataset_info.get("orthorect_data", None) if orthorect_data_name is not None: variable = self._perform_orthorectification(variable, orthorect_data_name) # Manage the attributes of the dataset - variable.attrs.setdefault('units', None) + variable.attrs.setdefault("units", None) variable.attrs.update(dataset_info) variable.attrs.update(self._get_global_attributes()) @@ -130,8 +130,8 @@ def _perform_interpolation(variable): TIE_POINTS_FACTOR )[0] new_variable = interpolated_values.rename( - num_tie_points_act='num_pixels', - num_tie_points_alt='num_lines' + num_tie_points_act="num_pixels", + num_tie_points_alt="num_lines" ) new_variable.name = variable.name new_variable.attrs = variable.attrs @@ -157,14 +157,14 @@ def _perform_geo_interpolation(longitude, latitude): TIE_POINTS_FACTOR ) new_longitude = interpolated_longitude.rename( - num_tie_points_act='num_pixels', - num_tie_points_alt='num_lines' + num_tie_points_act="num_pixels", + num_tie_points_alt="num_lines" ) new_longitude.name = longitude.name new_longitude.attrs = longitude.attrs new_latitude = interpolated_latitude.rename( - num_tie_points_act='num_pixels', - num_tie_points_alt='num_lines' + num_tie_points_act="num_pixels", + num_tie_points_alt="num_lines" ) new_latitude.name = latitude.name new_latitude.attrs = latitude.attrs @@ -181,20 +181,20 @@ def _perform_calibration(self, variable, dataset_info): def _get_global_attributes(self): """Create a dictionary of global attributes to be added to all datasets.""" attributes = { - 'filename': self.filename, - 'start_time': self.start_time, - 'end_time': self.end_time, - 'spacecraft_name': self.spacecraft_name, - 'ssp_lon': self.ssp_lon, - 'sensor': self.sensor, - 'filename_start_time': self.filename_info['sensing_start_time'], - 'filename_end_time': self.filename_info['sensing_end_time'], - 'platform_name': self.spacecraft_name, + "filename": self.filename, + "start_time": self.start_time, + "end_time": self.end_time, + "spacecraft_name": self.spacecraft_name, + "ssp_lon": self.ssp_lon, + "sensor": self.sensor, + "filename_start_time": self.filename_info["sensing_start_time"], + "filename_end_time": self.filename_info["sensing_end_time"], + "platform_name": self.spacecraft_name, } # Add a "quality_group" item to the dictionary with all the variables and attributes # which are found in the 'quality' group of the VII product - quality_group = self['quality'] + quality_group = self["quality"] quality_dict = {} for key in quality_group: # Add the values (as Numpy array) of each variable in the group where possible @@ -205,7 +205,7 @@ def _get_global_attributes(self): # Add the attributes of the quality group quality_dict.update(quality_group.attrs) - attributes['quality_group'] = quality_dict + attributes["quality_group"] = quality_dict return attributes @@ -213,29 +213,29 @@ def _get_global_attributes(self): def start_time(self): """Get observation start time.""" try: - start_time = datetime.strptime(self['/attr/sensing_start_time_utc'], '%Y%m%d%H%M%S.%f') + start_time = datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y%m%d%H%M%S.%f") except ValueError: - start_time = datetime.strptime(self['/attr/sensing_start_time_utc'], '%Y-%m-%d %H:%M:%S.%f') + start_time = datetime.strptime(self["/attr/sensing_start_time_utc"], "%Y-%m-%d %H:%M:%S.%f") return start_time @property def 
end_time(self): """Get observation end time.""" try: - end_time = datetime.strptime(self['/attr/sensing_end_time_utc'], '%Y%m%d%H%M%S.%f') + end_time = datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y%m%d%H%M%S.%f") except ValueError: - end_time = datetime.strptime(self['/attr/sensing_end_time_utc'], '%Y-%m-%d %H:%M:%S.%f') + end_time = datetime.strptime(self["/attr/sensing_end_time_utc"], "%Y-%m-%d %H:%M:%S.%f") return end_time @property def spacecraft_name(self): """Return spacecraft name.""" - return self['/attr/spacecraft'] + return self["/attr/spacecraft"] @property def sensor(self): """Return sensor.""" - return self['/attr/instrument'] + return self["/attr/instrument"] @property def ssp_lon(self): diff --git a/satpy/readers/vii_l1b_nc.py b/satpy/readers/vii_l1b_nc.py index 2e66c3deb0..2dbcb63eda 100644 --- a/satpy/readers/vii_l1b_nc.py +++ b/satpy/readers/vii_l1b_nc.py @@ -44,14 +44,14 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): super().__init__(filename, filename_info, filetype_info, **kwargs) # Read the variables which are required for the calibration - self._bt_conversion_a = self['data/calibration_data/bt_conversion_a'].values - self._bt_conversion_b = self['data/calibration_data/bt_conversion_b'].values - self._channel_cw_thermal = self['data/calibration_data/channel_cw_thermal'].values - self._integrated_solar_irradiance = self['data/calibration_data/Band_averaged_solar_irradiance'].values + self._bt_conversion_a = self["data/calibration_data/bt_conversion_a"].values + self._bt_conversion_b = self["data/calibration_data/bt_conversion_b"].values + self._channel_cw_thermal = self["data/calibration_data/channel_cw_thermal"].values + self._integrated_solar_irradiance = self["data/calibration_data/Band_averaged_solar_irradiance"].values # Computes the angle factor for reflectance calibration as inverse of cosine of solar zenith angle # (the values in the product file are on tie points and in degrees, # therefore interpolation and conversion to radians are required) - solar_zenith_angle = self['data/measurement_data/solar_zenith'] + solar_zenith_angle = self["data/measurement_data/solar_zenith"] solar_zenith_angle_on_pixels = self._perform_interpolation(solar_zenith_angle) solar_zenith_angle_on_pixels_radians = np.radians(solar_zenith_angle_on_pixels) self.angle_factor = 1.0 / (np.cos(solar_zenith_angle_on_pixels_radians)) @@ -67,27 +67,27 @@ def _perform_calibration(self, variable, dataset_info): DataArray: array containing the calibrated values and all the original metadata. 
""" - calibration_name = dataset_info['calibration'] - if calibration_name == 'brightness_temperature': + calibration_name = dataset_info["calibration"] + if calibration_name == "brightness_temperature": # Extract the values of calibration coefficients for the current channel - chan_index = dataset_info['chan_thermal_index'] + chan_index = dataset_info["chan_thermal_index"] cw = self._channel_cw_thermal[chan_index] a = self._bt_conversion_a[chan_index] b = self._bt_conversion_b[chan_index] # Perform the calibration calibrated_variable = self._calibrate_bt(variable, cw, a, b) calibrated_variable.attrs = variable.attrs - elif calibration_name == 'reflectance': + elif calibration_name == "reflectance": # Extract the values of calibration coefficients for the current channel - chan_index = dataset_info['chan_solar_index'] + chan_index = dataset_info["chan_solar_index"] isi = self._integrated_solar_irradiance[chan_index] # Perform the calibration calibrated_variable = self._calibrate_refl(variable, self.angle_factor.data, isi) calibrated_variable.attrs = variable.attrs - elif calibration_name == 'radiance': + elif calibration_name == "radiance": calibrated_variable = variable else: - raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info['name'])) + raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info["name"])) return calibrated_variable @@ -108,7 +108,7 @@ def _perform_orthorectification(self, variable, orthorect_data_name): # based on the simplified formula using mean Earth radius variable += np.degrees(orthorect_data / MEAN_EARTH_RADIUS) except KeyError: - logger.warning('Required dataset %s for orthorectification not available, skipping', orthorect_data_name) + logger.warning("Required dataset %s for orthorectification not available, skipping", orthorect_data_name) return variable @staticmethod diff --git a/satpy/readers/vii_l2_nc.py b/satpy/readers/vii_l2_nc.py index 3ce3926674..276d77f668 100644 --- a/satpy/readers/vii_l2_nc.py +++ b/satpy/readers/vii_l2_nc.py @@ -43,5 +43,5 @@ def _perform_orthorectification(self, variable, orthorect_data_name): orthorect_data = self[orthorect_data_name] variable += orthorect_data except KeyError: - logger.warning('Required dataset %s for orthorectification not available, skipping', orthorect_data_name) + logger.warning("Required dataset %s for orthorectification not available, skipping", orthorect_data_name) return variable diff --git a/satpy/readers/viirs_atms_sdr_base.py b/satpy/readers/viirs_atms_sdr_base.py index 048c601f84..be0a7a0d65 100644 --- a/satpy/readers/viirs_atms_sdr_base.py +++ b/satpy/readers/viirs_atms_sdr_base.py @@ -32,37 +32,37 @@ LOG = logging.getLogger(__name__) -VIIRS_DATASET_KEYS = {'GDNBO': 'VIIRS-DNB-GEO', - 'SVDNB': 'VIIRS-DNB-SDR', - 'GITCO': 'VIIRS-IMG-GEO-TC', - 'GIMGO': 'VIIRS-IMG-GEO', - 'SVI01': 'VIIRS-I1-SDR', - 'SVI02': 'VIIRS-I2-SDR', - 'SVI03': 'VIIRS-I3-SDR', - 'SVI04': 'VIIRS-I4-SDR', - 'SVI05': 'VIIRS-I5-SDR', - 'GMTCO': 'VIIRS-MOD-GEO-TC', - 'GMODO': 'VIIRS-MOD-GEO', - 'SVM01': 'VIIRS-M1-SDR', - 'SVM02': 'VIIRS-M2-SDR', - 'SVM03': 'VIIRS-M3-SDR', - 'SVM04': 'VIIRS-M4-SDR', - 'SVM05': 'VIIRS-M5-SDR', - 'SVM06': 'VIIRS-M6-SDR', - 'SVM07': 'VIIRS-M7-SDR', - 'SVM08': 'VIIRS-M8-SDR', - 'SVM09': 'VIIRS-M9-SDR', - 'SVM10': 'VIIRS-M10-SDR', - 'SVM11': 'VIIRS-M11-SDR', - 'SVM12': 'VIIRS-M12-SDR', - 'SVM13': 'VIIRS-M13-SDR', - 'SVM14': 'VIIRS-M14-SDR', - 'SVM15': 'VIIRS-M15-SDR', - 'SVM16': 'VIIRS-M16-SDR', - 'IVCDB': 'VIIRS-DualGain-Cal-IP'} 
-ATMS_DATASET_KEYS = {'SATMS': 'ATMS-SDR', - 'GATMO': 'ATMS-SDR-GEO', - 'TATMS': 'ATMS-TDR'} +VIIRS_DATASET_KEYS = {"GDNBO": "VIIRS-DNB-GEO", + "SVDNB": "VIIRS-DNB-SDR", + "GITCO": "VIIRS-IMG-GEO-TC", + "GIMGO": "VIIRS-IMG-GEO", + "SVI01": "VIIRS-I1-SDR", + "SVI02": "VIIRS-I2-SDR", + "SVI03": "VIIRS-I3-SDR", + "SVI04": "VIIRS-I4-SDR", + "SVI05": "VIIRS-I5-SDR", + "GMTCO": "VIIRS-MOD-GEO-TC", + "GMODO": "VIIRS-MOD-GEO", + "SVM01": "VIIRS-M1-SDR", + "SVM02": "VIIRS-M2-SDR", + "SVM03": "VIIRS-M3-SDR", + "SVM04": "VIIRS-M4-SDR", + "SVM05": "VIIRS-M5-SDR", + "SVM06": "VIIRS-M6-SDR", + "SVM07": "VIIRS-M7-SDR", + "SVM08": "VIIRS-M8-SDR", + "SVM09": "VIIRS-M9-SDR", + "SVM10": "VIIRS-M10-SDR", + "SVM11": "VIIRS-M11-SDR", + "SVM12": "VIIRS-M12-SDR", + "SVM13": "VIIRS-M13-SDR", + "SVM14": "VIIRS-M14-SDR", + "SVM15": "VIIRS-M15-SDR", + "SVM16": "VIIRS-M16-SDR", + "IVCDB": "VIIRS-DualGain-Cal-IP"} +ATMS_DATASET_KEYS = {"SATMS": "ATMS-SDR", + "GATMO": "ATMS-SDR-GEO", + "TATMS": "ATMS-TDR"} DATASET_KEYS = {} DATASET_KEYS.update(VIIRS_DATASET_KEYS) @@ -106,7 +106,7 @@ def _parse_datetime(self, datestr, timestr): datetime_str = (str(datestr.data.compute().astype(str)) + str(timestr.data.compute().astype(str))) - time_val = datetime.strptime(datetime_str, '%Y%m%d%H%M%S.%fZ') + time_val = datetime.strptime(datetime_str, "%Y%m%d%H%M%S.%fZ") if abs(time_val - NO_DATE) < EPSILON_TIME: # catch rare case when SDR files have incorrect date raise ValueError("Datetime invalid {}".format(time_val)) @@ -140,29 +140,29 @@ def end_orbit_number(self): def _get_aggr_path(self, fileinfo_key, aggr_default): dataset_group = DATASET_KEYS[self.datasets[0]] - default = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/' + aggr_default + default = "Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/" + aggr_default return self.filetype_info.get(fileinfo_key, default).format(dataset_group=dataset_group) @property def platform_name(self): """Get platform name.""" - default = '/attr/Platform_Short_Name' + default = "/attr/Platform_Short_Name" platform_path = self.filetype_info.get( - 'platform_name', default).format(**self.filetype_info) - platform_dict = {'NPP': 'Suomi-NPP', - 'JPSS-1': 'NOAA-20', - 'J01': 'NOAA-20', - 'JPSS-2': 'NOAA-21', - 'J02': 'NOAA-21'} + "platform_name", default).format(**self.filetype_info) + platform_dict = {"NPP": "Suomi-NPP", + "JPSS-1": "NOAA-20", + "J01": "NOAA-20", + "JPSS-2": "NOAA-21", + "J02": "NOAA-21"} return platform_dict.get(self[platform_path], self[platform_path]) @property def sensor_name(self): """Get sensor name.""" dataset_group = DATASET_KEYS[self.datasets[0]] - default = 'Data_Products/{dataset_group}/attr/Instrument_Short_Name' + default = "Data_Products/{dataset_group}/attr/Instrument_Short_Name" sensor_path = self.filetype_info.get( - 'sensor_name', default).format(dataset_group=dataset_group) + "sensor_name", default).format(dataset_group=dataset_group) return self[sensor_path].lower() def scale_swath_data(self, data, scaling_factors, dataset_group): @@ -178,7 +178,7 @@ def scale_swath_data(self, data, scaling_factors, dataset_group): def scale_data_to_specified_unit(self, data, dataset_id, ds_info): """Get sscale and offset factors and convert/scale data to given physical unit.""" var_path = self._generate_file_key(dataset_id, ds_info) - dataset_group = ds_info['dataset_group'] + dataset_group = ds_info["dataset_group"] file_units = _get_file_units(dataset_id, ds_info) output_units = ds_info.get("units", file_units) @@ -241,38 +241,38 @@ def expand_single_values(var, 
scans): else: expanded = np.repeat(var, scans) expanded.attrs = var.attrs - expanded.rename({expanded.dims[0]: 'y'}) + expanded.rename({expanded.dims[0]: "y"}) return expanded def _scan_size(self, dataset_group_name): """Get how many rows of data constitute one scanline.""" - if 'ATM' in dataset_group_name: + if "ATM" in dataset_group_name: scan_size = 1 - elif 'I' in dataset_group_name: + elif "I" in dataset_group_name: scan_size = 32 else: scan_size = 16 return scan_size def _generate_file_key(self, ds_id, ds_info, factors=False): - var_path = ds_info.get('file_key', 'All_Data/{dataset_group}_All/{calibration}') + var_path = ds_info.get("file_key", "All_Data/{dataset_group}_All/{calibration}") calibration = { - 'radiance': 'Radiance', - 'reflectance': 'Reflectance', - 'brightness_temperature': 'BrightnessTemperature', - }.get(ds_id.get('calibration')) - var_path = var_path.format(calibration=calibration, dataset_group=DATASET_KEYS[ds_info['dataset_group']]) - if ds_id['name'] in ['dnb_longitude', 'dnb_latitude']: + "radiance": "Radiance", + "reflectance": "Reflectance", + "brightness_temperature": "BrightnessTemperature", + }.get(ds_id.get("calibration")) + var_path = var_path.format(calibration=calibration, dataset_group=DATASET_KEYS[ds_info["dataset_group"]]) + if ds_id["name"] in ["dnb_longitude", "dnb_latitude"]: if self.use_tc is True: - return var_path + '_TC' - if self.use_tc is None and var_path + '_TC' in self.file_content: - return var_path + '_TC' + return var_path + "_TC" + if self.use_tc is None and var_path + "_TC" in self.file_content: + return var_path + "_TC" return var_path def _update_data_attributes(self, data, dataset_id, ds_info): file_units = _get_file_units(dataset_id, ds_info) output_units = ds_info.get("units", file_units) - i = getattr(data, 'attrs', {}) + i = getattr(data, "attrs", {}) i.update(ds_info) i.update({ "platform_name": self.platform_name, @@ -280,7 +280,7 @@ def _update_data_attributes(self, data, dataset_id, ds_info): "start_orbit": self.start_orbit_number, "end_orbit": self.end_orbit_number, "units": output_units, - "rows_per_scan": self._scan_size(ds_info['dataset_group']), + "rows_per_scan": self._scan_size(ds_info["dataset_group"]), }) i.update(dataset_id.to_dict()) data.attrs.update(i) @@ -304,7 +304,7 @@ def concatenate_dataset(self, dataset_group, var_path, **kwargs): data_chunks.append(variable.isel(y=slice(start_scan, start_scan + gscans * scan_size))) start_scan += gscans * scan_size - return xr.concat(data_chunks, 'y') + return xr.concat(data_chunks, "y") else: # This is not tested - Not sure this code is ever going to be used? A. 
Dybbroe # Mon Jan 2 13:31:21 2023 @@ -316,11 +316,11 @@ def _get_rows_per_granule(self, dataset_group): return [scan_size * gran_scans for gran_scans in scans_per_gran] def _get_scans_per_granule(self, dataset_group): - number_of_granules_path = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules' + number_of_granules_path = "Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules" nb_granules_path = number_of_granules_path.format(dataset_group=DATASET_KEYS[dataset_group]) scans = [] for granule in range(self[nb_granules_path]): - scans_path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans' + scans_path = "Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans" scans_path = scans_path.format(dataset_group=DATASET_KEYS[dataset_group], granule=granule) scans.append(self[scans_path]) return scans @@ -350,7 +350,7 @@ def available_datasets(self, configured_datasets=None): if is_avail is not None: yield is_avail, ds_info continue - dataset_group = [ds_group for ds_group in ds_info['dataset_groups'] if ds_group in self.datasets] + dataset_group = [ds_group for ds_group in ds_info["dataset_groups"] if ds_group in self.datasets] if dataset_group: yield True, ds_info elif is_avail is None: diff --git a/satpy/readers/viirs_compact.py b/satpy/readers/viirs_compact.py index b9f83e4287..af3a4ce766 100644 --- a/satpy/readers/viirs_compact.py +++ b/satpy/readers/viirs_compact.py @@ -67,9 +67,9 @@ h = 6.6260755e-34 # m2kg.s-1 k = 1.380658e-23 # m2kg.s-2.K-1 -short_names = {'NPP': 'Suomi-NPP', - 'J01': 'NOAA-20', - 'J02': 'NOAA-21'} +short_names = {"NPP": "Suomi-NPP", + "J01": "NOAA-20", + "J02": "NOAA-21"} class VIIRSCompactFileHandler(BaseFileHandler): @@ -83,28 +83,28 @@ def __init__(self, filename, filename_info, filetype_info): self.finfo = filename_info self.lons = None self.lats = None - if filetype_info['file_type'] == 'compact_m': - self.ch_type = 'MOD' - elif filetype_info['file_type'] == 'compact_dnb': - self.ch_type = 'DNB' + if filetype_info["file_type"] == "compact_m": + self.ch_type = "MOD" + elif filetype_info["file_type"] == "compact_dnb": + self.ch_type = "DNB" else: - raise IOError('Compact Viirs file type not recognized.') + raise IOError("Compact Viirs file type not recognized.") geo_data = self.h5f["Data_Products"]["VIIRS-%s-GEO" % self.ch_type]["VIIRS-%s-GEO_Gran_0" % self.ch_type] - self.min_lat = geo_data.attrs['South_Bounding_Coordinate'].item() - self.max_lat = geo_data.attrs['North_Bounding_Coordinate'].item() - self.min_lon = geo_data.attrs['West_Bounding_Coordinate'].item() - self.max_lon = geo_data.attrs['East_Bounding_Coordinate'].item() + self.min_lat = geo_data.attrs["South_Bounding_Coordinate"].item() + self.max_lat = geo_data.attrs["North_Bounding_Coordinate"].item() + self.min_lon = geo_data.attrs["West_Bounding_Coordinate"].item() + self.max_lon = geo_data.attrs["East_Bounding_Coordinate"].item() self.switch_to_cart = ((abs(self.max_lon - self.min_lon) > 90) or (max(abs(self.min_lat), abs(self.max_lat)) > 60)) self.scans = self.h5f["All_Data"]["NumberOfScans"][0] - self.geography = self.h5f["All_Data"]['VIIRS-%s-GEO_All' % self.ch_type] + self.geography = self.h5f["All_Data"]["VIIRS-%s-GEO_All" % self.ch_type] for key in self.h5f["All_Data"].keys(): if key.startswith("VIIRS") and key.endswith("SDR_All"): - channel = key.split('-')[1] + channel = key.split("-")[1] break # This supposes there is only one tiepoint zone in the track direction. 
@@ -134,9 +134,9 @@ def __init__(self, filename, filename_info, filetype_info): self.cache = {} self.mda = {} - short_name = np2str(self.h5f.attrs['Platform_Short_Name']) - self.mda['platform_name'] = short_names.get(short_name, short_name) - self.mda['sensor'] = 'viirs' + short_name = np2str(self.h5f.attrs["Platform_Short_Name"]) + self.mda["platform_name"] = short_names.get(short_name, short_name) + self.mda["sensor"] = "viirs" def __del__(self): """Close file handlers when we are done.""" @@ -145,75 +145,75 @@ def __del__(self): def get_dataset(self, key, info): """Load a dataset.""" - logger.debug('Reading %s.', key['name']) - if key['name'] in _channels_dict: + logger.debug("Reading %s.", key["name"]) + if key["name"] in _channels_dict: m_data = self.read_dataset(key, info) else: m_data = self.read_geo(key, info) m_data.attrs.update(info) - m_data.attrs['rows_per_scan'] = self.scan_size + m_data.attrs["rows_per_scan"] = self.scan_size return m_data def get_bounding_box(self): """Get the bounding box of the data.""" for key in self.h5f["Data_Products"].keys(): if key.startswith("VIIRS") and key.endswith("GEO"): - lats = self.h5f["Data_Products"][key][key + '_Gran_0'].attrs['G-Ring_Latitude'][()] - lons = self.h5f["Data_Products"][key][key + '_Gran_0'].attrs['G-Ring_Longitude'][()] + lats = self.h5f["Data_Products"][key][key + "_Gran_0"].attrs["G-Ring_Latitude"][()] + lons = self.h5f["Data_Products"][key][key + "_Gran_0"].attrs["G-Ring_Longitude"][()] break else: - raise KeyError('Cannot find bounding coordinates!') + raise KeyError("Cannot find bounding coordinates!") return lons.ravel(), lats.ravel() @property def start_time(self): """Get the start time.""" - return self.finfo['start_time'] + return self.finfo["start_time"] @property def end_time(self): """Get the end time.""" end_time = datetime.combine(self.start_time.date(), - self.finfo['end_time'].time()) + self.finfo["end_time"].time()) if end_time < self.start_time: end_time += timedelta(days=1) return end_time def read_geo(self, key, info): """Read angles.""" - pairs = {('satellite_azimuth_angle', 'satellite_zenith_angle'): + pairs = {("satellite_azimuth_angle", "satellite_zenith_angle"): ("SatelliteAzimuthAngle", "SatelliteZenithAngle"), - ('solar_azimuth_angle', 'solar_zenith_angle'): + ("solar_azimuth_angle", "solar_zenith_angle"): ("SolarAzimuthAngle", "SolarZenithAngle"), - ('dnb_solar_azimuth_angle', 'dnb_solar_zenith_angle'): + ("dnb_solar_azimuth_angle", "dnb_solar_zenith_angle"): ("SolarAzimuthAngle", "SolarZenithAngle"), - ('dnb_lunar_azimuth_angle', 'dnb_lunar_zenith_angle'): + ("dnb_lunar_azimuth_angle", "dnb_lunar_zenith_angle"): ("LunarAzimuthAngle", "LunarZenithAngle"), } if self.lons is None or self.lats is None: self.lons, self.lats = self.navigate() for pair, fkeys in pairs.items(): - if key['name'] in pair: + if key["name"] in pair: if (self.cache.get(pair[0]) is None or self.cache.get(pair[1]) is None): angles = self.angles(*fkeys) self.cache[pair[0]], self.cache[pair[1]] = angles - if key['name'] == pair[0]: - return xr.DataArray(self.cache[pair[0]], name=key['name'], - attrs=self.mda, dims=('y', 'x')) + if key["name"] == pair[0]: + return xr.DataArray(self.cache[pair[0]], name=key["name"], + attrs=self.mda, dims=("y", "x")) else: - return xr.DataArray(self.cache[pair[1]], name=key['name'], - attrs=self.mda, dims=('y', 'x')) + return xr.DataArray(self.cache[pair[1]], name=key["name"], + attrs=self.mda, dims=("y", "x")) - if info.get('standard_name') in ['latitude', 'longitude']: + if 
info.get("standard_name") in ["latitude", "longitude"]: mda = self.mda.copy() mda.update(info) - if info['standard_name'] == 'longitude': - return xr.DataArray(self.lons, attrs=mda, dims=('y', 'x')) + if info["standard_name"] == "longitude": + return xr.DataArray(self.lons, attrs=mda, dims=("y", "x")) else: - return xr.DataArray(self.lats, attrs=mda, dims=('y', 'x')) + return xr.DataArray(self.lats, attrs=mda, dims=("y", "x")) - if key['name'] == 'dnb_moon_illumination_fraction': + if key["name"] == "dnb_moon_illumination_fraction": mda = self.mda.copy() mda.update(info) return xr.DataArray(da.from_array(self.geography["MoonIllumFraction"]), @@ -222,7 +222,7 @@ def read_geo(self, key, info): def read_dataset(self, dataset_key, info): """Read a dataset.""" h5f = self.h5f - channel = _channels_dict[dataset_key['name']] + channel = _channels_dict[dataset_key["name"]] chan_dict = dict([(key.split("-")[1], key) for key in h5f["All_Data"].keys() if key.startswith("VIIRS")]) @@ -230,39 +230,39 @@ def read_dataset(self, dataset_key, info): h5rads = h5f["All_Data"][chan_dict[channel]]["Radiance"] chunks = h5rads.chunks or CHUNK_SIZE rads = xr.DataArray(da.from_array(h5rads, chunks=chunks), - name=dataset_key['name'], - dims=['y', 'x']).astype(np.float32) + name=dataset_key["name"], + dims=["y", "x"]).astype(np.float32) h5attrs = h5rads.attrs scans = h5f["All_Data"]["NumberOfScans"][0] rads = rads[:scans * 16, :] rads = rads.where(rads <= 65526) try: - rads = xr.where(rads <= h5attrs['Threshold'], - rads * h5attrs['RadianceScaleLow'] + - h5attrs['RadianceOffsetLow'], - rads * h5attrs['RadianceScaleHigh'] + - h5attrs['RadianceOffsetHigh']) + rads = xr.where(rads <= h5attrs["Threshold"], + rads * h5attrs["RadianceScaleLow"] + + h5attrs["RadianceOffsetLow"], + rads * h5attrs["RadianceScaleHigh"] + + h5attrs["RadianceOffsetHigh"]) except (KeyError, AttributeError): logger.info("Missing attribute for scaling of %s.", channel) pass unit = "W m-2 sr-1 μm-1" - if dataset_key['calibration'] == 'counts': + if dataset_key["calibration"] == "counts": raise NotImplementedError("Can't get counts from this data") - if dataset_key['calibration'] in ['reflectance', 'brightness_temperature']: + if dataset_key["calibration"] in ["reflectance", "brightness_temperature"]: # do calibrate try: # First guess: VIS or NIR data - a_vis = h5attrs['EquivalentWidth'] - b_vis = h5attrs['IntegratedSolarIrradiance'] - dse = h5attrs['EarthSunDistanceNormalised'] + a_vis = h5attrs["EquivalentWidth"] + b_vis = h5attrs["IntegratedSolarIrradiance"] + dse = h5attrs["EarthSunDistanceNormalised"] rads *= 100 * np.pi * a_vis / b_vis * (dse**2) unit = "%" except KeyError: # Maybe it's IR data? 
try: - a_ir = h5attrs['BandCorrectionCoefficientA'] - b_ir = h5attrs['BandCorrectionCoefficientB'] - lambda_c = h5attrs['CentralWaveLength'] + a_ir = h5attrs["BandCorrectionCoefficientA"] + b_ir = h5attrs["BandCorrectionCoefficientB"] + lambda_c = h5attrs["CentralWaveLength"] rads *= 1e6 rads = (h * c) / (k * lambda_c * np.log(1 + @@ -274,12 +274,12 @@ def read_dataset(self, dataset_key, info): except KeyError: logger.warning("Calibration failed.") - elif dataset_key['calibration'] != 'radiance': + elif dataset_key["calibration"] != "radiance": raise ValueError("Calibration parameter should be radiance, " "reflectance or brightness_temperature") rads = rads.clip(min=0) rads.attrs = self.mda - rads.attrs['units'] = unit + rads.attrs["units"] = unit return rads def expand_angle_and_nav(self, arrays): @@ -326,7 +326,7 @@ def navigate(self): return expanded def _get_geographical_chunks(self): - shape = self.geography['Longitude'].shape + shape = self.geography["Longitude"].shape horizontal_chunks = (self.nb_tiepoint_zones + 1).compute() chunks = (shape[0], tuple(horizontal_chunks)) return chunks diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index a8c6c934b2..646d7e0d17 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -86,28 +86,28 @@ def __init__(self, filename, filename_info, filetype_info): decode_cf=True, mask_and_scale=True, chunks={ - 'Columns': -1, - 'Rows': row_chunks_m, - 'Along_Scan_375m': -1, - 'Along_Track_375m': row_chunks_i, - 'Along_Scan_750m': -1, - 'Along_Track_750m': row_chunks_m, + "Columns": -1, + "Rows": row_chunks_m, + "Along_Scan_375m": -1, + "Along_Track_375m": row_chunks_i, + "Along_Scan_750m": -1, + "Along_Track_750m": row_chunks_m, }) - if 'Columns' in self.nc.dims: - self.nc = self.nc.rename({'Columns': 'x', 'Rows': 'y'}) - elif 'Along_Track_375m' in self.nc.dims: - self.nc = self.nc.rename({'Along_Scan_375m': 'x', 'Along_Track_375m': 'y'}) - self.nc = self.nc.rename({'Along_Scan_750m': 'x', 'Along_Track_750m': 'y'}) + if "Columns" in self.nc.dims: + self.nc = self.nc.rename({"Columns": "x", "Rows": "y"}) + elif "Along_Track_375m" in self.nc.dims: + self.nc = self.nc.rename({"Along_Scan_375m": "x", "Along_Track_375m": "y"}) + self.nc = self.nc.rename({"Along_Scan_750m": "x", "Along_Track_750m": "y"}) # For some reason, no 'standard_name' is defined in some netCDF files, so # here we manually make the definitions. 
- if 'Latitude' in self.nc: - self.nc['Latitude'].attrs.update({'standard_name': 'latitude'}) - if 'Longitude' in self.nc: - self.nc['Longitude'].attrs.update({'standard_name': 'longitude'}) + if "Latitude" in self.nc: + self.nc["Latitude"].attrs.update({"standard_name": "latitude"}) + if "Longitude" in self.nc: + self.nc["Longitude"].attrs.update({"standard_name": "longitude"}) - self.algorithm_version = filename_info['platform_shortname'] - self.sensor_name = 'viirs' + self.algorithm_version = filename_info["platform_shortname"] + self.sensor_name = "viirs" def rows_per_scans(self, data_arr: xr.DataArray) -> int: """Get number of array rows per instrument scan based on data resolution.""" @@ -115,7 +115,7 @@ def rows_per_scans(self, data_arr: xr.DataArray) -> int: def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray: """Get the dataset.""" - data_arr = self.nc[info['file_key']] + data_arr = self.nc[info["file_key"]] data_arr = self._mask_invalid(data_arr, info) units = info.get("units", data_arr.attrs.get("units")) if units is None or units == "unitless": @@ -150,27 +150,27 @@ def _decode_flag_meanings(data_arr: xr.DataArray): flag_meanings = data_arr.attrs.get("flag_meanings", None) if isinstance(flag_meanings, str) and "\n" not in flag_meanings: # only handle CF-standard flag meanings - data_arr.attrs['flag_meanings'] = [flag for flag in data_arr.attrs['flag_meanings'].split(' ')] + data_arr.attrs["flag_meanings"] = [flag for flag in data_arr.attrs["flag_meanings"].split(" ")] @property def start_time(self): """Get first date/time when observations were recorded.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get last date/time when observations were recorded.""" - return self.filename_info['end_time'] + return self.filename_info["end_time"] @property def platform_name(self): """Get platform name.""" - platform_path = self.filename_info['platform_shortname'] - platform_dict = {'NPP': 'Suomi-NPP', - 'JPSS-1': 'NOAA-20', - 'J01': 'NOAA-20', - 'JPSS-2': 'NOAA-21', - 'J02': 'NOAA-21'} + platform_path = self.filename_info["platform_shortname"] + platform_dict = {"NPP": "Suomi-NPP", + "JPSS-1": "NOAA-20", + "J01": "NOAA-20", + "JPSS-2": "NOAA-21", + "J02": "NOAA-21"} return platform_dict[platform_path.upper()] def available_datasets(self, configured_datasets=None): @@ -212,7 +212,7 @@ def available_datasets(self, configured_datasets=None): # file handler so let's yield early yield is_avail, ds_info continue - if self.file_type_matches(ds_info['file_type']) is None: + if self.file_type_matches(ds_info["file_type"]) is None: # this is not the file type for this dataset yield None, ds_info yield file_key in self.nc, ds_info @@ -278,18 +278,18 @@ def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray: def _get_veg_index_good_mask(self) -> xr.DataArray: # each mask array should be TRUE when pixels are UNACCEPTABLE - qf1 = self.nc['QF1 Surface Reflectance'] + qf1 = self.nc["QF1 Surface Reflectance"] has_sun_glint = (qf1 & 0b11000000) > 0 is_cloudy = (qf1 & 0b00001100) > 0 # mask everything but "confident clear" cloud_quality = (qf1 & 0b00000011) < 0b10 - qf2 = self.nc['QF2 Surface Reflectance'] + qf2 = self.nc["QF2 Surface Reflectance"] has_snow_or_ice = (qf2 & 0b00100000) > 0 has_cloud_shadow = (qf2 & 0b00001000) > 0 water_mask = (qf2 & 0b00000111) has_water = (water_mask <= 0b010) | (water_mask == 0b101) # shallow water, deep ocean, arctic - qf7 = self.nc['QF7 Surface Reflectance'] + 
qf7 = self.nc["QF7 Surface Reflectance"] has_aerosols = (qf7 & 0b00001100) > 0b1000 # high aerosol quantity adjacent_to_cloud = (qf7 & 0b00000010) > 0 diff --git a/satpy/readers/viirs_edr_active_fires.py b/satpy/readers/viirs_edr_active_fires.py index f1bcf4d3cc..bd8f3f6d69 100644 --- a/satpy/readers/viirs_edr_active_fires.py +++ b/satpy/readers/viirs_edr_active_fires.py @@ -44,7 +44,7 @@ def __init__(self, filename, filename_info, filetype_info, super(VIIRSActiveFiresFileHandler, self).__init__( filename, filename_info, filetype_info, auto_maskandscale=auto_maskandscale, xarray_kwargs=xarray_kwargs) - self.prefix = filetype_info.get('variable_prefix') + self.prefix = filetype_info.get("variable_prefix") def get_dataset(self, dsid, dsinfo): """Get requested data as DataArray. @@ -57,24 +57,24 @@ def get_dataset(self, dsid, dsinfo): Dask DataArray: Data """ - key = dsinfo.get('file_key', dsid['name']).format(variable_prefix=self.prefix) + key = dsinfo.get("file_key", dsid["name"]).format(variable_prefix=self.prefix) data = self[key] # rename "phoney dims" - data = data.rename(dict(zip(data.dims, ['y', 'x']))) + data = data.rename(dict(zip(data.dims, ["y", "x"]))) # handle attributes from YAML - for key in ('units', 'standard_name', 'flag_meanings', 'flag_values', '_FillValue'): + for key in ("units", "standard_name", "flag_meanings", "flag_values", "_FillValue"): # we only want to add information that isn't present already if key in dsinfo and key not in data.attrs: data.attrs[key] = dsinfo[key] - if isinstance(data.attrs.get('flag_meanings'), str): - data.attrs['flag_meanings'] = data.attrs['flag_meanings'].split(' ') + if isinstance(data.attrs.get("flag_meanings"), str): + data.attrs["flag_meanings"] = data.attrs["flag_meanings"].split(" ") # use more common CF standard units - if data.attrs.get('units') == 'kelvins': - data.attrs['units'] = 'K' + if data.attrs.get("units") == "kelvins": + data.attrs["units"] = "K" - data.attrs["platform_name"] = PLATFORM_MAP.get(self.filename_info['satellite_name'].upper(), "unknown") + data.attrs["platform_name"] = PLATFORM_MAP.get(self.filename_info["satellite_name"].upper(), "unknown") data.attrs["sensor"] = self.sensor_name return data @@ -82,12 +82,12 @@ def get_dataset(self, dsid, dsinfo): @property def start_time(self): """Get first date/time when observations were recorded.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get last date/time when observations were recorded.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def sensor_name(self): @@ -112,33 +112,33 @@ def __init__(self, filename, filename_info, filetype_info): filetype_info: Filetype information """ - skip_rows = filetype_info.get('skip_rows', 15) - columns = filetype_info['columns'] + skip_rows = filetype_info.get("skip_rows", 15) + columns = filetype_info["columns"] self.file_content = dd.read_csv(filename, skiprows=skip_rows, header=None, names=columns) super(VIIRSActiveFiresTextFileHandler, self).__init__(filename, filename_info, filetype_info) - self.platform_name = PLATFORM_MAP.get(self.filename_info['satellite_name'].upper(), "unknown") + self.platform_name = PLATFORM_MAP.get(self.filename_info["satellite_name"].upper(), "unknown") def get_dataset(self, dsid, dsinfo): """Get requested data as DataArray.""" - ds = self[dsid['name']].to_dask_array(lengths=True) + ds = self[dsid["name"]].to_dask_array(lengths=True) data = 
xr.DataArray(ds, dims=("y",), attrs={"platform_name": self.platform_name, "sensor": "VIIRS"}) - for key in ('units', 'standard_name', 'flag_meanings', 'flag_values', '_FillValue'): + for key in ("units", "standard_name", "flag_meanings", "flag_values", "_FillValue"): # we only want to add information that isn't present already if key in dsinfo and key not in data.attrs: data.attrs[key] = dsinfo[key] - if isinstance(data.attrs.get('flag_meanings'), str): - data.attrs['flag_meanings'] = data.attrs['flag_meanings'].split(' ') + if isinstance(data.attrs.get("flag_meanings"), str): + data.attrs["flag_meanings"] = data.attrs["flag_meanings"].split(" ") return data @property def start_time(self): """Get first date/time when observations were recorded.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get last date/time when observations were recorded.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) def __getitem__(self, key): """Get file content for 'key'.""" diff --git a/satpy/readers/viirs_edr_flood.py b/satpy/readers/viirs_edr_flood.py index 2d9c319656..2625d6d8fc 100644 --- a/satpy/readers/viirs_edr_flood.py +++ b/satpy/readers/viirs_edr_flood.py @@ -29,17 +29,17 @@ class VIIRSEDRFlood(HDF4FileHandler): @property def start_time(self): """Get start time.""" - return self.filename_info['start_time'] + return self.filename_info["start_time"] @property def end_time(self): """Get end time.""" - return self.filename_info.get('end_time', self.start_time) + return self.filename_info.get("end_time", self.start_time) @property def sensor_name(self): """Get sensor name.""" - sensor = self['/attr/SensorIdentifyCode'] + sensor = self["/attr/SensorIdentifyCode"] if isinstance(sensor, np.ndarray): return str(sensor.astype(str)).lower() return sensor.lower() @@ -47,7 +47,7 @@ def sensor_name(self): @property def platform_name(self): """Get platform name.""" - platform_name = self['/attr/Satellitename'] + platform_name = self["/attr/Satellitename"] if isinstance(platform_name, np.ndarray): return str(platform_name.astype(str)).lower() return platform_name.lower() @@ -58,23 +58,23 @@ def get_metadata(self, data, ds_info): metadata.update(data.attrs) metadata.update(ds_info) metadata.update({ - 'sensor': self.sensor_name, - 'platform_name': self.platform_name, - 'start_time': self.start_time, - 'end_time': self.end_time, + "sensor": self.sensor_name, + "platform_name": self.platform_name, + "start_time": self.start_time, + "end_time": self.end_time, }) return metadata def get_dataset(self, ds_id, ds_info): """Get dataset.""" - data = self[ds_id['name']] + data = self[ds_id["name"]] data.attrs = self.get_metadata(data, ds_info) - fill = data.attrs.pop('_Fillvalue') - offset = data.attrs.get('add_offset') - scale_factor = data.attrs.get('scale_factor') + fill = data.attrs.pop("_Fillvalue") + offset = data.attrs.get("add_offset") + scale_factor = data.attrs.get("scale_factor") data = data.where(data != fill) if scale_factor is not None and offset is not None: @@ -85,25 +85,25 @@ def get_dataset(self, ds_id, ds_info): def get_area_def(self, ds_id): """Get area definition.""" - data = self[ds_id['name']] + data = self[ds_id["name"]] proj_dict = { - 'proj': 'latlong', - 'datum': 'WGS84', - 'ellps': 'WGS84', - 'no_defs': True + "proj": "latlong", + "datum": "WGS84", + "ellps": "WGS84", + "no_defs": True } - area_extent = [data.attrs.get('ProjectionMinLongitude'), 
data.attrs.get('ProjectionMinLatitude'), - data.attrs.get('ProjectionMaxLongitude'), data.attrs.get('ProjectionMaxLatitude')] + area_extent = [data.attrs.get("ProjectionMinLongitude"), data.attrs.get("ProjectionMinLatitude"), + data.attrs.get("ProjectionMaxLongitude"), data.attrs.get("ProjectionMaxLatitude")] area = geometry.AreaDefinition( - 'viirs_flood_area', - 'name_of_proj', - 'id_of_proj', + "viirs_flood_area", + "name_of_proj", + "id_of_proj", proj_dict, - int(self.filename_info['dim0']), - int(self.filename_info['dim1']), + int(self.filename_info["dim0"]), + int(self.filename_info["dim1"]), np.asarray(area_extent) ) diff --git a/satpy/readers/viirs_l1b.py b/satpy/readers/viirs_l1b.py index a265bb1f82..510a37165d 100644 --- a/satpy/readers/viirs_l1b.py +++ b/satpy/readers/viirs_l1b.py @@ -38,39 +38,39 @@ def _parse_datetime(self, datestr): def start_orbit_number(self): """Get start orbit number.""" try: - return int(self['/attr/orbit_number']) + return int(self["/attr/orbit_number"]) except KeyError: - return int(self['/attr/OrbitNumber']) + return int(self["/attr/OrbitNumber"]) @property def end_orbit_number(self): """Get end orbit number.""" try: - return int(self['/attr/orbit_number']) + return int(self["/attr/orbit_number"]) except KeyError: - return int(self['/attr/OrbitNumber']) + return int(self["/attr/OrbitNumber"]) @property def platform_name(self): """Get platform name.""" try: - res = self.get('/attr/platform', - self.filename_info['platform_shortname']) + res = self.get("/attr/platform", + self.filename_info["platform_shortname"]) except KeyError: - res = 'Unknown' + res = "Unknown" return { - 'JPSS-1': 'NOAA-20', - 'NP': 'Suomi-NPP', - 'J1': 'NOAA-20', - 'J2': 'NOAA-21', - 'JPSS-2': 'NOAA-21', + "JPSS-1": "NOAA-20", + "NP": "Suomi-NPP", + "J1": "NOAA-20", + "J2": "NOAA-21", + "JPSS-2": "NOAA-21", }.get(res, res) @property def sensor_name(self): """Get sensor name.""" - return self['/attr/instrument'].lower() + return self["/attr/instrument"].lower() def adjust_scaling_factors(self, factors, file_units, output_units): """Adjust scaling factors.""" @@ -96,35 +96,35 @@ def adjust_scaling_factors(self, factors, file_units, output_units): def get_shape(self, ds_id, ds_info): """Get shape.""" - var_path = self._dataset_name_to_var_path(ds_id['name'], ds_info) - return self.get(var_path + '/shape', 1) + var_path = self._dataset_name_to_var_path(ds_id["name"], ds_info) + return self.get(var_path + "/shape", 1) @property def start_time(self): """Get start time.""" - return self._parse_datetime(self['/attr/time_coverage_start']) + return self._parse_datetime(self["/attr/time_coverage_start"]) @property def end_time(self): """Get end time.""" - return self._parse_datetime(self['/attr/time_coverage_end']) + return self._parse_datetime(self["/attr/time_coverage_end"]) def _get_dataset_file_units(self, dataset_id, ds_info, var_path): - file_units = ds_info.get('file_units') + file_units = ds_info.get("file_units") if file_units is None: - file_units = self.get(var_path + '/attr/units') + file_units = self.get(var_path + "/attr/units") # they were almost completely CF compliant... 
if file_units == "none": file_units = "1" - if dataset_id.get('calibration') == 'radiance' and ds_info['units'] == 'W m-2 um-1 sr-1': - rad_units_path = var_path + '/attr/radiance_units' + if dataset_id.get("calibration") == "radiance" and ds_info["units"] == "W m-2 um-1 sr-1": + rad_units_path = var_path + "/attr/radiance_units" if rad_units_path in self: if file_units is None: - file_units = self[var_path + '/attr/radiance_units'] - if file_units == 'Watts/meter^2/steradian/micrometer': - file_units = 'W m-2 um-1 sr-1' - elif ds_info.get('units') == '%' and file_units is None: + file_units = self[var_path + "/attr/radiance_units"] + if file_units == "Watts/meter^2/steradian/micrometer": + file_units = "W m-2 um-1 sr-1" + elif ds_info.get("units") == "%" and file_units is None: # v1.1 and above of level 1 processing removed 'units' attribute # for all reflectance channels file_units = "1" @@ -132,54 +132,54 @@ def _get_dataset_file_units(self, dataset_id, ds_info, var_path): return file_units def _get_dataset_valid_range(self, dataset_id, ds_info, var_path): - if dataset_id.get('calibration') == 'radiance' and ds_info['units'] == 'W m-2 um-1 sr-1': - rad_units_path = var_path + '/attr/radiance_units' + if dataset_id.get("calibration") == "radiance" and ds_info["units"] == "W m-2 um-1 sr-1": + rad_units_path = var_path + "/attr/radiance_units" if rad_units_path in self: # we are getting a reflectance band but we want the radiance values # special scaling parameters - scale_factor = self[var_path + '/attr/radiance_scale_factor'] - scale_offset = self[var_path + '/attr/radiance_add_offset'] + scale_factor = self[var_path + "/attr/radiance_scale_factor"] + scale_offset = self[var_path + "/attr/radiance_add_offset"] else: # we are getting a btemp band but we want the radiance values # these are stored directly in the primary variable - scale_factor = self[var_path + '/attr/scale_factor'] - scale_offset = self[var_path + '/attr/add_offset'] - valid_min = self[var_path + '/attr/valid_min'] - valid_max = self[var_path + '/attr/valid_max'] - elif ds_info.get('units') == '%': + scale_factor = self[var_path + "/attr/scale_factor"] + scale_offset = self[var_path + "/attr/add_offset"] + valid_min = self[var_path + "/attr/valid_min"] + valid_max = self[var_path + "/attr/valid_max"] + elif ds_info.get("units") == "%": # normal reflectance - valid_min = self[var_path + '/attr/valid_min'] - valid_max = self[var_path + '/attr/valid_max'] - scale_factor = self[var_path + '/attr/scale_factor'] - scale_offset = self[var_path + '/attr/add_offset'] - elif ds_info.get('units') == 'K': + valid_min = self[var_path + "/attr/valid_min"] + valid_max = self[var_path + "/attr/valid_max"] + scale_factor = self[var_path + "/attr/scale_factor"] + scale_offset = self[var_path + "/attr/add_offset"] + elif ds_info.get("units") == "K": # normal brightness temperature # use a special LUT to get the actual values - lut_var_path = ds_info.get('lut', var_path + '_brightness_temperature_lut') + lut_var_path = ds_info.get("lut", var_path + "_brightness_temperature_lut") # we get the BT values from a look up table using the scaled radiance integers - valid_min = self[lut_var_path + '/attr/valid_min'] - valid_max = self[lut_var_path + '/attr/valid_max'] + valid_min = self[lut_var_path + "/attr/valid_min"] + valid_max = self[lut_var_path + "/attr/valid_max"] scale_factor = scale_offset = None else: - valid_min = self.get(var_path + '/attr/valid_min') - valid_max = self.get(var_path + '/attr/valid_max') - scale_factor = 
self.get(var_path + '/attr/scale_factor') - scale_offset = self.get(var_path + '/attr/add_offset') + valid_min = self.get(var_path + "/attr/valid_min") + valid_max = self.get(var_path + "/attr/valid_max") + scale_factor = self.get(var_path + "/attr/scale_factor") + scale_offset = self.get(var_path + "/attr/add_offset") return valid_min, valid_max, scale_factor, scale_offset def get_metadata(self, dataset_id, ds_info): """Get metadata.""" - var_path = self._dataset_name_to_var_path(dataset_id['name'], ds_info) + var_path = self._dataset_name_to_var_path(dataset_id["name"], ds_info) shape = self.get_shape(dataset_id, ds_info) file_units = self._get_dataset_file_units(dataset_id, ds_info, var_path) # Get extra metadata if self._is_scan_based_array(shape): - rows_per_scan = int(shape[0] / self['/dimension/number_of_scans']) - ds_info.setdefault('rows_per_scan', rows_per_scan) + rows_per_scan = int(shape[0] / self["/dimension/number_of_scans"]) + ds_info.setdefault("rows_per_scan", rows_per_scan) - i = getattr(self[var_path], 'attrs', {}) + i = getattr(self[var_path], "attrs", {}) i.update(ds_info) i.update(dataset_id.to_dict()) i.update({ @@ -195,22 +195,22 @@ def get_metadata(self, dataset_id, ds_info): return i def _is_scan_based_array(self, shape): - return '/dimension/number_of_scans' in self and isinstance(shape, tuple) and shape + return "/dimension/number_of_scans" in self and isinstance(shape, tuple) and shape def get_dataset(self, dataset_id, ds_info): """Get dataset.""" - var_path = self._dataset_name_to_var_path(dataset_id['name'], ds_info) + var_path = self._dataset_name_to_var_path(dataset_id["name"], ds_info) metadata = self.get_metadata(dataset_id, ds_info) valid_min, valid_max, scale_factor, scale_offset = self._get_dataset_valid_range(dataset_id, ds_info, var_path) - if dataset_id.get('calibration') == 'radiance' and ds_info['units'] == 'W m-2 um-1 sr-1': + if dataset_id.get("calibration") == "radiance" and ds_info["units"] == "W m-2 um-1 sr-1": data = self[var_path] - elif ds_info.get('units') == '%': + elif ds_info.get("units") == "%": data = self[var_path] - elif ds_info.get('units') == 'K': + elif ds_info.get("units") == "K": # normal brightness temperature # use a special LUT to get the actual values - lut_var_path = ds_info.get('lut', var_path + '_brightness_temperature_lut') + lut_var_path = ds_info.get("lut", var_path + "_brightness_temperature_lut") data = self[var_path] # we get the BT values from a look up table using the scaled radiance integers index_arr = data.data.astype(int) @@ -223,21 +223,21 @@ def get_dataset(self, dataset_id, ds_info): if valid_min is not None and valid_max is not None: data = data.where((data >= valid_min) & (data <= valid_max)) - if data.attrs.get('units') in ['%', 'K', '1', 'W m-2 um-1 sr-1'] and \ - 'flag_meanings' in data.attrs: + if data.attrs.get("units") in ["%", "K", "1", "W m-2 um-1 sr-1"] and \ + "flag_meanings" in data.attrs: # flag meanings don't mean anything anymore for these variables # these aren't category products - data.attrs.pop('flag_meanings', None) - data.attrs.pop('flag_values', None) + data.attrs.pop("flag_meanings", None) + data.attrs.pop("flag_values", None) factors = (scale_factor, scale_offset) - factors = self.adjust_scaling_factors(factors, metadata['file_units'], ds_info.get("units")) + factors = self.adjust_scaling_factors(factors, metadata["file_units"], ds_info.get("units")) if factors[0] != 1 or factors[1] != 0: data *= factors[0] data += factors[1] # rename dimensions to correspond to satpy's 'y' and 
'x' standard - if 'number_of_lines' in data.dims: - data = data.rename({'number_of_lines': 'y', 'number_of_pixels': 'x'}) + if "number_of_lines" in data.dims: + data = data.rename({"number_of_lines": "y", "number_of_pixels": "x"}) return data def available_datasets(self, configured_datasets=None): @@ -255,11 +255,11 @@ def available_datasets(self, configured_datasets=None): # file handler so let's yield early yield is_avail, ds_info continue - ft_matches = self.file_type_matches(ds_info['file_type']) - var_path = self._dataset_name_to_var_path(ds_info['name'], ds_info) + ft_matches = self.file_type_matches(ds_info["file_type"]) + var_path = self._dataset_name_to_var_path(ds_info["name"], ds_info) is_in_file = var_path in self yield ft_matches and is_in_file, ds_info @staticmethod def _dataset_name_to_var_path(dataset_name: str, ds_info: dict) -> str: - return ds_info.get('file_key', 'observation_data/{}'.format(dataset_name)) + return ds_info.get("file_key", "observation_data/{}".format(dataset_name)) diff --git a/satpy/readers/viirs_sdr.py b/satpy/readers/viirs_sdr.py index 71379b2066..db9ba9ba10 100644 --- a/satpy/readers/viirs_sdr.py +++ b/satpy/readers/viirs_sdr.py @@ -83,18 +83,18 @@ class VIIRSSDRFileHandler(JPSS_SDR_FileHandler): def __init__(self, filename, filename_info, filetype_info, use_tc=None, **kwargs): """Initialize file handler.""" - self.datasets = filename_info['datasets'].split('-') + self.datasets = filename_info["datasets"].split("-") self.use_tc = use_tc super().__init__(filename, filename_info, filetype_info, **kwargs) def __getitem__(self, item): """Get item.""" - if '*' in item: + if "*" in item: # this is an aggregated field that can't easily be loaded, need to # join things together idx = 0 base_item = item - item = base_item.replace('*', str(idx)) + item = base_item.replace("*", str(idx)) result = [] while True: try: @@ -106,7 +106,7 @@ def __getitem__(self, item): break idx += 1 - item = base_item.replace('*', str(idx)) + item = base_item.replace("*", str(idx)) return result else: return super().__getitem__(item) @@ -120,11 +120,11 @@ def get_dataset(self, dataset_id, ds_info): scans for each granule is read from: ``Data_Products/...Gran_x/N_Number_Of_Scans``. 
""" - dataset_group = [ds_group for ds_group in ds_info['dataset_groups'] if ds_group in self.datasets] + dataset_group = [ds_group for ds_group in ds_info["dataset_groups"] if ds_group in self.datasets] if not dataset_group: return dataset_group = dataset_group[0] - ds_info['dataset_group'] = dataset_group + ds_info["dataset_group"] = dataset_group var_path = self._generate_file_key(dataset_id, ds_info) data = self.concatenate_dataset(dataset_group, var_path) @@ -138,17 +138,17 @@ def get_dataset(self, dataset_id, ds_info): def get_bounding_box(self): """Get the bounding box of this file.""" from pyproj import Geod - geod = Geod(ellps='WGS84') + geod = Geod(ellps="WGS84") dataset_group = DATASET_KEYS[self.datasets[0]] idx = 0 lons_ring = None lats_ring = None while True: - path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{idx}/attr/' + path = "Data_Products/{dataset_group}/{dataset_group}_Gran_{idx}/attr/" prefix = path.format(dataset_group=dataset_group, idx=idx) try: - lats = self.file_content[prefix + 'G-Ring_Latitude'] - lons = self.file_content[prefix + 'G-Ring_Longitude'] + lats = self.file_content[prefix + "G-Ring_Latitude"] + lons = self.file_content[prefix + "G-Ring_Longitude"] if lons_ring is None: lons_ring = lons lats_ring = lats @@ -215,16 +215,16 @@ def filter_filenames_by_info(self, filename_items): geo_del = [] viirs_del = [] for filename, filename_info in filename_items: - datasets = filename_info['datasets'].split('-') + datasets = filename_info["datasets"].split("-") if not self._is_viirs_dataset(datasets): viirs_del.append(filename) - if ('GITCO' in datasets) or ('GMTCO' in datasets): + if ("GITCO" in datasets) or ("GMTCO" in datasets): if self.use_tc is False: geo_del.append(filename) else: geo_keep.append(filename) - elif ('GIMGO' in datasets) or ('GMODO' in datasets): + elif ("GIMGO" in datasets) or ("GMODO" in datasets): if self.use_tc is True: geo_del.append(filename) else: @@ -240,20 +240,20 @@ def _remove_non_viirs_datasets_from_files(self, filename_items, files_to_edit): return self._remove_datasets_from_files(filename_items, files_to_edit, no_viirs) def _remove_geo_datasets_from_files(self, filename_items, files_to_edit): - datasets_to_consider = ['GITCO', 'GMTCO', 'GIMGO', 'GMODO'] + datasets_to_consider = ["GITCO", "GMTCO", "GIMGO", "GMODO"] return self._remove_datasets_from_files(filename_items, files_to_edit, datasets_to_consider) def _remove_datasets_from_files(self, filename_items, files_to_edit, considered_datasets): fdict = dict(filename_items) for to_del in files_to_edit: - fdict[to_del]['datasets'] = fdict[to_del]['datasets'].split('-') + fdict[to_del]["datasets"] = fdict[to_del]["datasets"].split("-") for dataset in considered_datasets: with suppress(ValueError): - fdict[to_del]['datasets'].remove(dataset) - if not fdict[to_del]['datasets']: + fdict[to_del]["datasets"].remove(dataset) + if not fdict[to_del]["datasets"]: del fdict[to_del] else: - fdict[to_del]['datasets'] = "-".join(fdict[to_del]['datasets']) + fdict[to_del]["datasets"] = "-".join(fdict[to_del]["datasets"]) filename_items = fdict.items() return filename_items @@ -269,15 +269,15 @@ def _load_filenames_from_geo_ref(self, dsid): try: # get the filename and remove the creation time # which is often wrong - fn = fh['/attr/N_GEO_Ref'][:46] + '*.h5' + fn = fh["/attr/N_GEO_Ref"][:46] + "*.h5" fns.extend(glob(os.path.join(base_dir, fn))) # usually is non-terrain corrected file, add the terrain # corrected file too - if fn[:5] == 'GIMGO': - fn = 'GITCO' + fn[5:] - elif fn[:5] == 
'GMODO': - fn = 'GMTCO' + fn[5:] + if fn[:5] == "GIMGO": + fn = "GITCO" + fn[5:] + elif fn[:5] == "GMODO": + fn = "GMTCO" + fn[5:] else: continue fns.extend(glob(os.path.join(base_dir, fn))) @@ -288,22 +288,22 @@ def _load_filenames_from_geo_ref(self, dsid): def _get_primary_secondary_geo_groups(self, ds_info): """Find out which geolocation files are needed.""" - if ds_info['dataset_groups'][0].startswith('GM'): + if ds_info["dataset_groups"][0].startswith("GM"): if self.use_tc is False: - prime_geo = 'GMODO' - second_geo = 'GMTCO' + prime_geo = "GMODO" + second_geo = "GMTCO" else: - prime_geo = 'GMTCO' - second_geo = 'GMODO' - elif ds_info['dataset_groups'][0].startswith('GI'): + prime_geo = "GMTCO" + second_geo = "GMODO" + elif ds_info["dataset_groups"][0].startswith("GI"): if self.use_tc is False: - prime_geo = 'GIMGO' - second_geo = 'GITCO' + prime_geo = "GIMGO" + second_geo = "GITCO" else: - prime_geo = 'GITCO' - second_geo = 'GIMGO' + prime_geo = "GITCO" + second_geo = "GIMGO" else: - raise ValueError('Unknown dataset group %s' % ds_info['dataset_groups'][0]) + raise ValueError("Unknown dataset group %s" % ds_info["dataset_groups"][0]) return prime_geo, second_geo def get_right_geo_fhs(self, dsid, fhs): @@ -313,7 +313,7 @@ def get_right_geo_fhs(self, dsid, fhs): desired, other = split_desired_other(fhs, prime_geo, second_geo) if desired: try: - ds_info['dataset_groups'].remove(second_geo) + ds_info["dataset_groups"].remove(second_geo) except ValueError: pass return desired @@ -324,13 +324,13 @@ def _get_file_handlers(self, dsid): """Get the file handler to load this dataset.""" ds_info = self.all_ids[dsid] - fhs = [fh for fh in self.file_handlers['generic_file'] - if set(fh.datasets) & set(ds_info['dataset_groups'])] + fhs = [fh for fh in self.file_handlers["generic_file"] + if set(fh.datasets) & set(ds_info["dataset_groups"])] if not fhs: LOG.warning("Required file type '%s' not found or loaded for " - "'%s'", ds_info['file_type'], dsid['name']) + "'%s'", ds_info["file_type"], dsid["name"]) else: - if len(set(ds_info['dataset_groups']) & {'GITCO', 'GIMGO', 'GMTCO', 'GMODO'}) > 1: + if len(set(ds_info["dataset_groups"]) & {"GITCO", "GIMGO", "GMTCO", "GMODO"}) > 1: fhs = self.get_right_geo_fhs(dsid, fhs) return fhs @@ -351,12 +351,12 @@ def _get_coordinates_for_dataset_key(self, dsid): # check the dataset file for the geolocation filename geo_filenames = self._load_filenames_from_geo_ref(dsid) self._create_new_geo_file_handlers(geo_filenames) - self._remove_not_loaded_geo_dataset_group(c_info['dataset_groups'], prime_geo, second_geo) + self._remove_not_loaded_geo_dataset_group(c_info["dataset_groups"], prime_geo, second_geo) return coords def _geo_dataset_groups(self, c_info): - if len(c_info['dataset_groups']) == 1: # filtering already done + if len(c_info["dataset_groups"]) == 1: # filtering already done return None, None try: prime_geo, second_geo = self._get_primary_secondary_geo_groups(c_info) @@ -365,12 +365,12 @@ def _geo_dataset_groups(self, c_info): return None, None def _create_new_geo_file_handlers(self, geo_filenames): - existing_filenames = set([fh.filename for fh in self.file_handlers['generic_file']]) + existing_filenames = set([fh.filename for fh in self.file_handlers["generic_file"]]) geo_filenames = set(geo_filenames) - existing_filenames self.create_filehandlers(geo_filenames) def _remove_not_loaded_geo_dataset_group(self, c_dataset_groups, prime_geo, second_geo): - all_fhs = self.file_handlers['generic_file'] + all_fhs = self.file_handlers["generic_file"] desired, 
other = split_desired_other(all_fhs, prime_geo, second_geo) group_to_remove = second_geo if desired else prime_geo c_dataset_groups.remove(group_to_remove) diff --git a/satpy/readers/viirs_vgac_l1c_nc.py b/satpy/readers/viirs_vgac_l1c_nc.py index e4a29c27f1..0fa8ddf782 100644 --- a/satpy/readers/viirs_vgac_l1c_nc.py +++ b/satpy/readers/viirs_vgac_l1c_nc.py @@ -37,9 +37,9 @@ def __init__(self, filename, filename_info, filetype_info): filename, filename_info, filetype_info) self.engine = "h5netcdf" - self._start_time = filename_info['start_time'] + self._start_time = filename_info["start_time"] self._end_time = None - self.sensor = 'viirs' + self.sensor = "viirs" self.filename_info = filename_info def calibrate(self, data, yaml_info, file_key, nc): @@ -75,11 +75,11 @@ def set_time_attrs(self, data): def get_dataset(self, key, yaml_info): """Get dataset.""" - logger.debug("Getting data for: %s", yaml_info['name']) + logger.debug("Getting data for: %s", yaml_info["name"]) nc = xr.open_dataset(self.filename, engine=self.engine, decode_times=False, - chunks={'y': CHUNK_SIZE, 'x': 800}) - name = yaml_info.get('nc_store_name', yaml_info['name']) - file_key = yaml_info.get('nc_key', name) + chunks={"y": CHUNK_SIZE, "x": 800}) + name = yaml_info.get("nc_store_name", yaml_info["name"]) + file_key = yaml_info.get("nc_key", name) data = nc[file_key] data = self.calibrate(data, yaml_info, file_key, nc) data.attrs.update(nc.attrs) # For now add global attributes to all datasets diff --git a/satpy/readers/virr_l1b.py b/satpy/readers/virr_l1b.py index 0ffe7251cb..260666ff8b 100644 --- a/satpy/readers/virr_l1b.py +++ b/satpy/readers/virr_l1b.py @@ -77,24 +77,24 @@ class VIRR_L1B(HDF5FileHandler): def __init__(self, filename, filename_info, filetype_info): """Open file and perform initial setup.""" super(VIRR_L1B, self).__init__(filename, filename_info, filetype_info) - LOG.debug('day/night flag for {0}: {1}'.format(filename, self['/attr/Day Or Night Flag'])) - self.geolocation_prefix = filetype_info['geolocation_prefix'] - self.platform_id = filename_info['platform_id'] - self.l1b_prefix = 'Data/' - self.wave_number = 'Emissive_Centroid_Wave_Number' + LOG.debug("day/night flag for {0}: {1}".format(filename, self["/attr/Day Or Night Flag"])) + self.geolocation_prefix = filetype_info["geolocation_prefix"] + self.platform_id = filename_info["platform_id"] + self.l1b_prefix = "Data/" + self.wave_number = "Emissive_Centroid_Wave_Number" # Else filename_info['platform_id'] == FY3C. 
- if filename_info['platform_id'] == 'FY3B': - self.l1b_prefix = '' - self.wave_number = 'Emmisive_Centroid_Wave_Number' + if filename_info["platform_id"] == "FY3B": + self.l1b_prefix = "" + self.wave_number = "Emmisive_Centroid_Wave_Number" def get_dataset(self, dataset_id, ds_info): """Create DataArray from file content for `dataset_id`.""" - file_key = self.geolocation_prefix + ds_info.get('file_key', dataset_id['name']) - if self.platform_id == 'FY3B': - file_key = file_key.replace('Data/', '') + file_key = self.geolocation_prefix + ds_info.get("file_key", dataset_id["name"]) + if self.platform_id == "FY3B": + file_key = file_key.replace("Data/", "") data = self[file_key] - band_index = ds_info.get('band_index') - valid_range = data.attrs.pop('valid_range', None) + band_index = ds_info.get("band_index") + valid_range = data.attrs.pop("valid_range", None) if isinstance(valid_range, np.ndarray): valid_range = valid_range.tolist() if band_index is not None: @@ -102,50 +102,50 @@ def get_dataset(self, dataset_id, ds_info): if valid_range: data = data.where((data >= valid_range[0]) & (data <= valid_range[1])) - if 'Emissive' in file_key: + if "Emissive" in file_key: self._calibrate_emissive(data, band_index) - elif 'RefSB' in file_key: + elif "RefSB" in file_key: data = self._calibrate_reflective(data, band_index) else: - slope = self._correct_slope(self[file_key + '/attr/Slope']) - intercept = self[file_key + '/attr/Intercept'] + slope = self._correct_slope(self[file_key + "/attr/Slope"]) + intercept = self[file_key + "/attr/Intercept"] if valid_range: data = data.where((data >= valid_range[0]) & (data <= valid_range[1])) data = data * slope + intercept - new_dims = {old: new for old, new in zip(data.dims, ('y', 'x'))} + new_dims = {old: new for old, new in zip(data.dims, ("y", "x"))} data = data.rename(new_dims) # use lowercase sensor name to be consistent with the rest of satpy - data.attrs.update({'platform_name': self['/attr/Satellite Name'], - 'sensor': self['/attr/Sensor Identification Code'].lower()}) + data.attrs.update({"platform_name": self["/attr/Satellite Name"], + "sensor": self["/attr/Sensor Identification Code"].lower()}) data.attrs.update(ds_info) - units = self.get(file_key + '/attr/units') - if units is not None and str(units).lower() != 'none': - data.attrs.update({'units': self.get(file_key + '/attr/units')}) - elif data.attrs.get('calibration') == 'reflectance': - data.attrs.update({'units': '%'}) + units = self.get(file_key + "/attr/units") + if units is not None and str(units).lower() != "none": + data.attrs.update({"units": self.get(file_key + "/attr/units")}) + elif data.attrs.get("calibration") == "reflectance": + data.attrs.update({"units": "%"}) else: - data.attrs.update({'units': '1'}) + data.attrs.update({"units": "1"}) return data def _calibrate_reflective(self, data, band_index): - if self.platform_id == 'FY3B': + if self.platform_id == "FY3B": coeffs = da.from_array(FY3B_REF_COEFFS, chunks=-1) else: - coeffs = self['/attr/RefSB_Cal_Coefficients'] + coeffs = self["/attr/RefSB_Cal_Coefficients"] slope = self._correct_slope(coeffs[0::2]) intercept = coeffs[1::2] data = data * slope[band_index] + intercept[band_index] return data def _calibrate_emissive(self, data, band_index): - slope = self._correct_slope(self[self.l1b_prefix + 'Emissive_Radiance_Scales']. + slope = self._correct_slope(self[self.l1b_prefix + "Emissive_Radiance_Scales"]. 
data[:, band_index][:, np.newaxis]) - intercept = self[self.l1b_prefix + 'Emissive_Radiance_Offsets'].data[:, band_index][:, np.newaxis] + intercept = self[self.l1b_prefix + "Emissive_Radiance_Offsets"].data[:, band_index][:, np.newaxis] # Converts cm^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data) # to SI units m^-1, mW*m^-3*str^-1. - wave_number = self['/attr/' + self.wave_number][band_index] * 100 + wave_number = self["/attr/" + self.wave_number][band_index] * 100 bt_data = rad2temp(wave_number, (data.data * slope + intercept) * 1e-5) if isinstance(bt_data, np.ndarray): # old versions of pyspectral produce numpy arrays @@ -161,11 +161,11 @@ def _correct_slope(self, slope): @property def start_time(self): """Get starting observation time.""" - start_time = self['/attr/Observing Beginning Date'] + 'T' + self['/attr/Observing Beginning Time'] + 'Z' - return datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S.%fZ') + start_time = self["/attr/Observing Beginning Date"] + "T" + self["/attr/Observing Beginning Time"] + "Z" + return datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S.%fZ") @property def end_time(self): """Get ending observation time.""" - end_time = self['/attr/Observing Ending Date'] + 'T' + self['/attr/Observing Ending Time'] + 'Z' - return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%S.%fZ') + end_time = self["/attr/Observing Ending Date"] + "T" + self["/attr/Observing Ending Time"] + "Z" + return datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S.%fZ") diff --git a/satpy/readers/xmlformat.py b/satpy/readers/xmlformat.py index 0c46a3595e..969c30113a 100644 --- a/satpy/readers/xmlformat.py +++ b/satpy/readers/xmlformat.py @@ -19,9 +19,8 @@ from __future__ import annotations -from xml.etree.ElementTree import ElementTree - import numpy as np +from defusedxml.ElementTree import parse VARIABLES: dict[str, str] = {} @@ -141,8 +140,7 @@ def to_scales(val): def parse_format(xml_file): """Parse the xml file to create types, scaling factor types, and scales.""" - tree = ElementTree() - tree.parse(xml_file) + tree = parse(xml_file) for param in tree.find("parameters"): VARIABLES[param.get("name")] = param.get("value") @@ -204,5 +202,5 @@ def apply_scales(self, array): return _apply_scales(array, *self.translator[array.dtype]) -if __name__ == '__main__': +if __name__ == "__main__": pass diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index 1c4e68d621..ff3599052a 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -87,13 +87,13 @@ def _match_filenames(filenames, pattern): def _verify_reader_info_assign_config_files(config, config_files): try: - reader_info = config['reader'] + reader_info = config["reader"] except KeyError: raise KeyError( "Malformed config file {}: missing reader 'reader'".format( config_files)) else: - reader_info['config_files'] = config_files + reader_info["config_files"] = config_files def load_yaml_configs(*config_files, loader=Loader): @@ -113,9 +113,9 @@ def load_yaml_configs(*config_files, loader=Loader): """ config = {} - logger.debug('Reading %s', str(config_files)) + logger.debug("Reading %s", str(config_files)) for config_file in config_files: - with open(config_file, 'r', encoding='utf-8') as fd: + with open(config_file, "r", encoding="utf-8") as fd: config = recursive_dict_update(config, yaml.load(fd, Loader=loader)) _verify_reader_info_assign_config_files(config, config_files) return config @@ -136,23 +136,23 @@ def __init__(self, config_dict): "deprecated. 
Use ReaderClass.from_config_files " "instead.") self.config = config_dict - self.info = self.config['reader'] - self.name = self.info['name'] + self.info = self.config["reader"] + self.name = self.info["name"] self.file_patterns = [] - for file_type, filetype_info in self.config['file_types'].items(): - filetype_info.setdefault('file_type', file_type) + for file_type, filetype_info in self.config["file_types"].items(): + filetype_info.setdefault("file_type", file_type) # correct separator if needed - file_patterns = [os.path.join(*pattern.split('/')) - for pattern in filetype_info['file_patterns']] - filetype_info['file_patterns'] = file_patterns + file_patterns = [os.path.join(*pattern.split("/")) + for pattern in filetype_info["file_patterns"]] + filetype_info["file_patterns"] = file_patterns self.file_patterns.extend(file_patterns) - if 'sensors' in self.info and not isinstance(self.info['sensors'], (list, tuple)): - self.info['sensors'] = [self.info['sensors']] - self.datasets = self.config.get('datasets', {}) - self._id_keys = self.info.get('data_identification_keys', default_id_keys_config) - self._co_keys = self.info.get('coord_identification_keys', default_co_keys_config) - self.info['filenames'] = [] + if "sensors" in self.info and not isinstance(self.info["sensors"], (list, tuple)): + self.info["sensors"] = [self.info["sensors"]] + self.datasets = self.config.get("datasets", {}) + self._id_keys = self.info.get("data_identification_keys", default_id_keys_config) + self._co_keys = self.info.get("coord_identification_keys", default_co_keys_config) + self.info["filenames"] = [] self.all_ids = {} self.load_ds_ids_from_config() @@ -160,12 +160,12 @@ def __init__(self, config_dict): def from_config_files(cls, *config_files, **reader_kwargs): """Create a reader instance from one or more YAML configuration files.""" config_dict = load_yaml_configs(*config_files) - return config_dict['reader']['reader'](config_dict, **reader_kwargs) + return config_dict["reader"]["reader"](config_dict, **reader_kwargs) @property def sensor_names(self): """Names of sensors whose data is being loaded by this reader.""" - return self.info['sensors'] or [] + return self.info["sensors"] or [] @property def all_dataset_ids(self): @@ -176,7 +176,7 @@ def all_dataset_ids(self): def all_dataset_names(self): """Get names of all datasets known to this reader.""" # remove the duplicates from various calibration and resolutions - return set(ds_id['name'] for ds_id in self.all_dataset_ids) + return set(ds_id["name"] for ds_id in self.all_dataset_ids) @property def available_dataset_ids(self): @@ -188,7 +188,7 @@ def available_dataset_ids(self): @property def available_dataset_names(self): """Get names of datasets that are loadable by this reader.""" - return (ds_id['name'] for ds_id in self.available_dataset_ids) + return (ds_id["name"] for ds_id in self.available_dataset_ids) @property @abstractmethod @@ -243,7 +243,7 @@ def select_files_from_directory( """ filenames = set() if directory is None: - directory = '' + directory = "" # all the glob patterns that we are going to look at all_globs = {os.path.join(directory, globify(pattern)) for pattern in self.file_patterns} @@ -286,9 +286,9 @@ def load_ds_ids_from_config(self): for dataset in self.datasets.values(): # xarray doesn't like concatenating attributes that are lists # https://github.com/pydata/xarray/issues/2060 - if 'coordinates' in dataset and \ - isinstance(dataset['coordinates'], list): - dataset['coordinates'] = tuple(dataset['coordinates']) + if 
"coordinates" in dataset and \ + isinstance(dataset["coordinates"], list): + dataset["coordinates"] = tuple(dataset["coordinates"]) id_keys = get_keys_from_config(self._id_keys, dataset) # Build each permutation/product of the dataset @@ -316,10 +316,10 @@ def _build_id_permutations(self, dataset, id_keys): """Build each permutation/product of the dataset.""" id_kwargs = [] for key, idval in id_keys.items(): - val = dataset.get(key, idval.get('default') if idval is not None else None) + val = dataset.get(key, idval.get("default") if idval is not None else None) val_type = None if idval is not None: - val_type = idval.get('type') + val_type = idval.get("type") if val_type is not None and issubclass(val_type, tuple): # special case: wavelength can be [min, nominal, max] # but is still considered 1 option @@ -363,7 +363,7 @@ def __init__(self, self.file_handlers = {} self.available_ids = {} - self.filter_filenames = self.info.get('filter_filenames', filter_filenames) + self.filter_filenames = self.info.get("filter_filenames", filter_filenames) self.filter_parameters = filter_parameters or {} self.register_data_files() @@ -371,7 +371,7 @@ def __init__(self, def sensor_names(self): """Names of sensors whose data is being loaded by this reader.""" if not self.file_handlers: - return self.info['sensors'] + return self.info["sensors"] file_handlers = (handlers[0] for handlers in self.file_handlers.values()) @@ -382,7 +382,7 @@ def sensor_names(self): except NotImplementedError: continue if not sensor_names: - return self.info['sensors'] + return self.info["sensors"] return sorted(sensor_names) @property @@ -453,11 +453,11 @@ def find_required_filehandlers(self, requirements, filename_info): def sorted_filetype_items(self): """Sort the instance's filetypes in using order.""" processed_types = [] - file_type_items = deque(self.config['file_types'].items()) + file_type_items = deque(self.config["file_types"].items()) while len(file_type_items): filetype, filetype_info = file_type_items.popleft() - requirements = filetype_info.get('requires') + requirements = filetype_info.get("requires") if requirements is not None: # requirements have not been processed yet -> wait missing = [req for req in requirements @@ -475,7 +475,7 @@ def filename_items_for_filetype(filenames, filetype_info): if not isinstance(filenames, set): # we perform set operations later on to improve performance filenames = set(filenames) - for pattern in filetype_info['file_patterns']: + for pattern in filetype_info["file_patterns"]: matched_files = set() matches = _match_filenames(filenames, pattern) for filename in matches: @@ -491,8 +491,8 @@ def filename_items_for_filetype(filenames, filetype_info): def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=None): """Generate new filehandler instances.""" - requirements = filetype_info.get('requires') - filetype_cls = filetype_info['file_reader'] + requirements = filetype_info.get("requires") + filetype_cls = filetype_info["file_reader"] if fh_kwargs is None: fh_kwargs = {} @@ -507,15 +507,15 @@ def _new_filehandler_instances(self, filetype_info, filename_items, fh_kwargs=No warnings.warn(msg, stacklevel=4) continue except RuntimeError as err: - warnings.warn(str(err) + ' for {}'.format(filename), stacklevel=4) + warnings.warn(str(err) + " for {}".format(filename), stacklevel=4) continue yield filetype_cls(filename, filename_info, filetype_info, *req_fh, **fh_kwargs) def time_matches(self, fstart, fend): """Check that a file's start and end time mtach 
filter_parameters of this reader.""" - start_time = self.filter_parameters.get('start_time') - end_time = self.filter_parameters.get('end_time') + start_time = self.filter_parameters.get("start_time") + end_time = self.filter_parameters.get("end_time") fend = fend or fstart if start_time and fend and fend < start_time: return False @@ -527,17 +527,17 @@ def metadata_matches(self, sample_dict, file_handler=None): """Check that file metadata matches filter_parameters of this reader.""" # special handling of start/end times if not self.time_matches( - sample_dict.get('start_time'), sample_dict.get('end_time')): + sample_dict.get("start_time"), sample_dict.get("end_time")): return False for key, val in self.filter_parameters.items(): - if key != 'area' and key not in sample_dict: + if key != "area" and key not in sample_dict: continue - if key in ['start_time', 'end_time']: + if key in ["start_time", "end_time"]: continue - elif key == 'area' and file_handler: + elif key == "area" and file_handler: if not self.check_file_covers_area(file_handler, val): - logger.info('Filtering out %s based on area', + logger.info("Filtering out %s based on area", file_handler.filename) break elif key in sample_dict and val != sample_dict[key]: @@ -556,22 +556,22 @@ def filter_filenames_by_info(self, filename_items): the requested end time. """ for filename, filename_info in filename_items: - fend = filename_info.get('end_time') - fstart = filename_info.setdefault('start_time', fend) + fend = filename_info.get("end_time") + fstart = filename_info.setdefault("start_time", fend) if fend and fend < fstart: # correct for filenames with 1 date and 2 times fend = fend.replace(year=fstart.year, month=fstart.month, day=fstart.day) - filename_info['end_time'] = fend + filename_info["end_time"] = fend if self.metadata_matches(filename_info): yield filename, filename_info def filter_fh_by_metadata(self, filehandlers): """Filter out filehandlers using provide filter parameters.""" for filehandler in filehandlers: - filehandler.metadata['start_time'] = filehandler.start_time - filehandler.metadata['end_time'] = filehandler.end_time + filehandler.metadata["start_time"] = filehandler.start_time + filehandler.metadata["end_time"] = filehandler.end_time if self.metadata_matches(filehandler.metadata, filehandler): yield filehandler @@ -606,9 +606,9 @@ def _new_filehandlers_for_filetype(self, filetype_info, filenames, fh_kwargs=Non def create_filehandlers(self, filenames, fh_kwargs=None): """Organize the filenames into file types and create file handlers.""" filenames = list(OrderedDict.fromkeys(filenames)) - logger.debug("Assigning to %s: %s", self.info['name'], filenames) + logger.debug("Assigning to %s: %s", self.info["name"], filenames) - self.info.setdefault('filenames', []).extend(filenames) + self.info.setdefault("filenames", []).extend(filenames) filename_set = set(filenames) created_fhs = {} # load files that we know about by creating the file handlers @@ -670,13 +670,13 @@ def update_ds_ids_from_file_handlers(self): new_ids = {} for is_avail, ds_info in avail_datasets: # especially from the yaml config - coordinates = ds_info.get('coordinates') + coordinates = ds_info.get("coordinates") if isinstance(coordinates, list): # xarray doesn't like concatenating attributes that are # lists: https://github.com/pydata/xarray/issues/2060 - ds_info['coordinates'] = tuple(ds_info['coordinates']) + ds_info["coordinates"] = tuple(ds_info["coordinates"]) - ds_info.setdefault('modifiers', tuple()) # default to no mods + 
ds_info.setdefault("modifiers", tuple()) # default to no mods # Create DataID for this dataset ds_id = DataID(self._id_keys, **ds_info) @@ -690,7 +690,7 @@ def update_ds_ids_from_file_handlers(self): self.all_ids = new_ids @staticmethod - def _load_dataset(dsid, ds_info, file_handlers, dim='y', **kwargs): + def _load_dataset(dsid, ds_info, file_handlers, dim="y", **kwargs): """Load only a piece of the dataset.""" slice_list = [] failure = True @@ -723,9 +723,9 @@ def _load_dataset_data(self, file_handlers, dsid, **kwargs): proj = self._load_dataset(dsid, ds_info, file_handlers, **kwargs) # FIXME: areas could be concatenated here # Update the metadata - proj.attrs['start_time'] = file_handlers[0].start_time - proj.attrs['end_time'] = file_handlers[-1].end_time - proj.attrs['reader'] = self.name + proj.attrs["start_time"] = file_handlers[0].start_time + proj.attrs["end_time"] = file_handlers[-1].end_time + proj.attrs["reader"] = self.name return proj def _preferred_filetype(self, filetypes): @@ -750,10 +750,10 @@ def _get_file_handlers(self, dsid): """Get the file handler to load this dataset.""" ds_info = self.all_ids[dsid] - filetype = self._preferred_filetype(ds_info['file_type']) + filetype = self._preferred_filetype(ds_info["file_type"]) if filetype is None: logger.warning("Required file type '%s' not found or loaded for " - "'%s'", ds_info['file_type'], dsid['name']) + "'%s'", ds_info["file_type"], dsid["name"]) else: return self.file_handlers[filetype] @@ -786,12 +786,12 @@ def _get_lons_lats_from_coords(self, coords): """Get lons and lats from the coords list.""" lons, lats = None, None for coord in coords: - if coord.attrs.get('standard_name') == 'longitude': + if coord.attrs.get("standard_name") == "longitude": lons = coord - elif coord.attrs.get('standard_name') == 'latitude': + elif coord.attrs.get("standard_name") == "latitude": lats = coord if lons is None or lats is None: - raise ValueError('Missing longitude or latitude coordinate: ' + str(coords)) + raise ValueError("Missing longitude or latitude coordinate: " + str(coords)) return lons, lats def _make_swath_definition_from_lons_lats(self, lons, lats): @@ -804,11 +804,11 @@ def _make_swath_definition_from_lons_lats(self, lons, lats): sdef = None if sdef is None: sdef = SwathDefinition(lons, lats) - sensor_str = '_'.join(self.info['sensors']) - shape_str = '_'.join(map(str, lons.shape)) + sensor_str = "_".join(self.info["sensors"]) + shape_str = "_".join(map(str, lons.shape)) sdef.name = "{}_{}_{}_{}".format(sensor_str, shape_str, - lons.attrs.get('name', lons.name), - lats.attrs.get('name', lats.name)) + lons.attrs.get("name", lons.name), + lats.attrs.get("name", lats.name)) if key is not None: FileYAMLReader._coords_cache[key] = sdef return sdef @@ -830,7 +830,7 @@ def _load_dataset_with_area(self, dsid, coords, **kwargs): area = self._load_dataset_area(dsid, file_handlers, coords, **kwargs) if area is not None: - ds.attrs['area'] = area + ds.attrs["area"] = area ds = add_crs_xy_coords(ds, area) return ds @@ -840,7 +840,7 @@ def _assign_coords_from_dataarray(coords, ds): if not coords: coords = [] for coord in ds.coords.values(): - if coord.attrs.get('standard_name') in ['longitude', 'latitude']: + if coord.attrs.get("standard_name") in ["longitude", "latitude"]: coords.append(coord) return coords @@ -855,12 +855,12 @@ def _load_ancillary_variables(self, datasets, **kwargs): for dataset in datasets.values(): new_vars = [] - for av_id in dataset.attrs.get('ancillary_variables', []): + for av_id in 
dataset.attrs.get("ancillary_variables", []): if isinstance(av_id, DataID): new_vars.append(datasets[av_id]) else: new_vars.append(av_id) - dataset.attrs['ancillary_variables'] = new_vars + dataset.attrs["ancillary_variables"] = new_vars def _gather_ancillary_variables_ids(self, datasets): """Gather ancillary variables' ids. @@ -869,9 +869,9 @@ def _gather_ancillary_variables_ids(self, datasets): """ all_av_ids = set() for dataset in datasets.values(): - ancillary_variables = dataset.attrs.get('ancillary_variables', []) + ancillary_variables = dataset.attrs.get("ancillary_variables", []) if not isinstance(ancillary_variables, (list, tuple, set)): - ancillary_variables = ancillary_variables.split(' ') + ancillary_variables = ancillary_variables.split(" ") av_ids = [] for key in ancillary_variables: try: @@ -880,7 +880,7 @@ def _gather_ancillary_variables_ids(self, datasets): logger.warning("Can't load ancillary dataset %s", str(key)) all_av_ids |= set(av_ids) - dataset.attrs['ancillary_variables'] = av_ids + dataset.attrs["ancillary_variables"] = av_ids return all_av_ids def get_dataset_key(self, key, available_only=False, **kwargs): @@ -953,12 +953,12 @@ def _get_coordinates_for_dataset_key(self, dsid): """Get the coordinate dataset keys for *dsid*.""" ds_info = self.all_ids[dsid] cids = [] - for cinfo in ds_info.get('coordinates', []): + for cinfo in ds_info.get("coordinates", []): if not isinstance(cinfo, dict): - cinfo = {'name': cinfo} + cinfo = {"name": cinfo} for key in self._co_keys: - if key == 'name': + if key == "name": continue if key in ds_info: if ds_info[key] is not None: @@ -995,52 +995,52 @@ def _set_orientation(dataset, upper_right_corner): """ # do some checks and early returns - if upper_right_corner == 'native': + if upper_right_corner == "native": logger.debug("Requested orientation for Dataset {} is 'native' (default). " - "No flipping is applied.".format(dataset.attrs.get('name'))) + "No flipping is applied.".format(dataset.attrs.get("name"))) return dataset - if upper_right_corner not in ['NW', 'NE', 'SE', 'SW', 'native']: + if upper_right_corner not in ["NW", "NE", "SE", "SW", "native"]: raise ValueError("Target orientation for Dataset {} not recognized. 
" "Kwarg upper_right_corner should be " - "'NW', 'NE', 'SW', 'SE' or 'native'.".format(dataset.attrs.get('name', 'unknown_name'))) + "'NW', 'NE', 'SW', 'SE' or 'native'.".format(dataset.attrs.get("name", "unknown_name"))) - if 'area' not in dataset.attrs: + if "area" not in dataset.attrs: logger.info("Dataset {} is missing the area attribute " - "and will not be flipped.".format(dataset.attrs.get('name', 'unknown_name'))) + "and will not be flipped.".format(dataset.attrs.get("name", "unknown_name"))) return dataset - if isinstance(dataset.attrs['area'], SwathDefinition): + if isinstance(dataset.attrs["area"], SwathDefinition): logger.info("Dataset {} is in a SwathDefinition " - "and will not be flipped.".format(dataset.attrs.get('name', 'unknown_name'))) + "and will not be flipped.".format(dataset.attrs.get("name", "unknown_name"))) return dataset - projection_type = _get_projection_type(dataset.attrs['area']) - accepted_geos_proj_types = ['Geostationary Satellite (Sweep Y)', 'Geostationary Satellite (Sweep X)'] + projection_type = _get_projection_type(dataset.attrs["area"]) + accepted_geos_proj_types = ["Geostationary Satellite (Sweep Y)", "Geostationary Satellite (Sweep X)"] if projection_type not in accepted_geos_proj_types: logger.info("Dataset {} is not in one of the known geostationary projections {} " - "and cannot be flipped.".format(dataset.attrs.get('name', 'unknown_name'), + "and cannot be flipped.".format(dataset.attrs.get("name", "unknown_name"), accepted_geos_proj_types)) return dataset target_eastright, target_northup = _get_target_scene_orientation(upper_right_corner) - area_extents_to_update = _get_dataset_area_extents_array(dataset.attrs['area']) + area_extents_to_update = _get_dataset_area_extents_array(dataset.attrs["area"]) current_eastright, current_northup = _get_current_scene_orientation(area_extents_to_update) if target_northup == current_northup and target_eastright == current_eastright: logger.info("Dataset {} is already in the target orientation " - "and will not be flipped.".format(dataset.attrs.get('name', 'unknown_name'))) + "and will not be flipped.".format(dataset.attrs.get("name", "unknown_name"))) return dataset if target_northup != current_northup: dataset, area_extents_to_update = _flip_dataset_data_and_area_extents(dataset, area_extents_to_update, - 'upsidedown') + "upsidedown") if target_eastright != current_eastright: dataset, area_extents_to_update = _flip_dataset_data_and_area_extents(dataset, area_extents_to_update, - 'leftright') + "leftright") - dataset.attrs['area'] = _get_new_flipped_area_definition(dataset.attrs['area'], area_extents_to_update, + dataset.attrs["area"] = _get_new_flipped_area_definition(dataset.attrs["area"], area_extents_to_update, flip_areadef_stacking=target_northup != current_northup) return dataset @@ -1062,9 +1062,9 @@ def _get_target_scene_orientation(upper_right_corner): 'NE' corresponds to target_eastright and target_northup being True. 
""" - target_northup = upper_right_corner in ['NW', 'NE'] + target_northup = upper_right_corner in ["NW", "NE"] - target_eastright = upper_right_corner in ['NE', 'SE'] + target_eastright = upper_right_corner in ["NE", "SE"] return target_eastright, target_northup @@ -1091,11 +1091,11 @@ def _get_current_scene_orientation(area_extents_to_update): def _flip_dataset_data_and_area_extents(dataset, area_extents_to_update, flip_direction): """Flip the data and area extents array for a dataset.""" - logger.info("Flipping Dataset {} {}.".format(dataset.attrs.get('name', 'unknown_name'), flip_direction)) - if flip_direction == 'upsidedown': + logger.info("Flipping Dataset {} {}.".format(dataset.attrs.get("name", "unknown_name"), flip_direction)) + if flip_direction == "upsidedown": dataset = dataset[::-1, :] area_extents_to_update[:, [1, 3]] = area_extents_to_update[:, [3, 1]] - elif flip_direction == 'leftright': + elif flip_direction == "leftright": dataset = dataset[:, ::-1] area_extents_to_update[:, [0, 2]] = area_extents_to_update[:, [2, 0]] else: @@ -1128,7 +1128,7 @@ def _get_new_flipped_area_definition(dataset_area_attr, area_extents_to_update, class GEOFlippableFileYAMLReader(FileYAMLReader): """Reader for flippable geostationary data.""" - def _load_dataset_with_area(self, dsid, coords, upper_right_corner='native', **kwargs): + def _load_dataset_with_area(self, dsid, coords, upper_right_corner="native", **kwargs): ds = super(GEOFlippableFileYAMLReader, self)._load_dataset_with_area(dsid, coords, **kwargs) if ds is not None: @@ -1165,15 +1165,15 @@ def create_filehandlers(self, filenames, fh_kwargs=None): for fhs in created_fhs.values(): for fh in fhs: # check the filename for total_segments parameter as a fallback - ts = fh.filename_info.get('total_segments', 1) + ts = fh.filename_info.get("total_segments", 1) # if the YAML has segments explicitly specified then use that - fh.filetype_info.setdefault('expected_segments', ts) + fh.filetype_info.setdefault("expected_segments", ts) # add segment key-values for FCI filehandlers - if 'segment' not in fh.filename_info: - fh.filename_info['segment'] = fh.filename_info.get('count_in_repeat_cycle', 1) + if "segment" not in fh.filename_info: + fh.filename_info["segment"] = fh.filename_info.get("count_in_repeat_cycle", 1) return created_fhs - def _load_dataset(self, dsid, ds_info, file_handlers, dim='y', pad_data=True): + def _load_dataset(self, dsid, ds_info, file_handlers, dim="y", pad_data=True): """Load only a piece of the dataset.""" if not pad_data: return FileYAMLReader._load_dataset(dsid, ds_info, @@ -1186,7 +1186,7 @@ def _load_dataset(self, dsid, ds_info, file_handlers, dim='y', pad_data=True): raise KeyError( "Could not load {} from any provided files".format(dsid)) - filetype = file_handlers[0].filetype_info['file_type'] + filetype = file_handlers[0].filetype_info["file_type"] self.empty_segment = xr.full_like(projectable, np.nan) for i, sli in enumerate(slice_list): if sli is None: @@ -1230,9 +1230,9 @@ def _load_area_def_with_padding(self, dsid, file_handlers): def _pad_later_segments_area(self, file_handlers, dsid): """Pad area definitions for missing segments that are later in sequence than the first available.""" - expected_segments = file_handlers[0].filetype_info['expected_segments'] - filetype = file_handlers[0].filetype_info['file_type'] - available_segments = [int(fh.filename_info.get('segment', 1)) for + expected_segments = file_handlers[0].filetype_info["expected_segments"] + filetype = 
file_handlers[0].filetype_info["file_type"] + available_segments = [int(fh.filename_info.get("segment", 1)) for fh in file_handlers] area_defs = self._get_segments_areadef_with_later_padded(file_handlers, filetype, dsid, available_segments, @@ -1250,7 +1250,7 @@ def _get_segments_areadef_with_later_padded(self, file_handlers, filetype, dsid, fh = file_handlers[idx] area = fh.get_area_def(dsid) except ValueError: - area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type='later') + area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type="later") area_defs[segment] = area seg_size = area.shape @@ -1258,14 +1258,14 @@ def _get_segments_areadef_with_later_padded(self, file_handlers, filetype, dsid, def _pad_earlier_segments_area(self, file_handlers, dsid, area_defs): """Pad area definitions for missing segments that are earlier in sequence than the first available.""" - available_segments = [int(fh.filename_info.get('segment', 1)) for + available_segments = [int(fh.filename_info.get("segment", 1)) for fh in file_handlers] area = file_handlers[0].get_area_def(dsid) seg_size = area.shape - filetype = file_handlers[0].filetype_info['file_type'] + filetype = file_handlers[0].filetype_info["file_type"] for segment in range(available_segments[0] - 1, 0, -1): - area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type='earlier') + area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type="earlier") area_defs[segment] = area seg_size = area.shape @@ -1278,7 +1278,7 @@ def _get_new_areadef_for_padded_segment(self, area, filetype, seg_size, segment, fill_extent = (area.area_extent[0], new_ll_y, area.area_extent[2], new_ur_y) - area = AreaDefinition('fill', 'fill', 'fill', area.crs, + area = AreaDefinition("fill", "fill", "fill", area.crs, seg_size[1], new_height_px, fill_extent) return area @@ -1287,10 +1287,10 @@ def _get_y_area_extents_for_padded_segment(self, area, filetype, padding_type, s new_height_proj_coord, new_height_px = self._get_new_areadef_heights(area, seg_size, segment_n=segment, filetype=filetype) - if padding_type == 'later': + if padding_type == "later": new_ll_y = area.area_extent[1] + new_height_proj_coord new_ur_y = area.area_extent[1] - elif padding_type == 'earlier': + elif padding_type == "earlier": new_ll_y = area.area_extent[3] new_ur_y = area.area_extent[3] - new_height_proj_coord else: @@ -1324,13 +1324,13 @@ def _find_missing_segments(file_handlers, ds_info, dsid): expected_segments = 1 # get list of file handlers in segment order # (ex. 
first segment, second segment, etc) - handlers = sorted(file_handlers, key=lambda x: x.filename_info.get('segment', 1)) + handlers = sorted(file_handlers, key=lambda x: x.filename_info.get("segment", 1)) projectable = None for fh in handlers: - if fh.filetype_info['file_type'] in ds_info['file_type']: - expected_segments = fh.filetype_info['expected_segments'] + if fh.filetype_info["file_type"] in ds_info["file_type"]: + expected_segments = fh.filetype_info["expected_segments"] - while int(fh.filename_info.get('segment', 1)) > counter: + while int(fh.filename_info.get("segment", 1)) > counter: slice_list.append(None) counter += 1 try: @@ -1396,17 +1396,17 @@ def _collect_segment_position_infos(self, filetype): # collect the segment positioning infos for all available segments for fh in self.file_handlers[filetype]: chk_infos = fh.get_segment_position_info() - chk_infos.update({'segment_nr': fh.filename_info['segment'] - 1}) - self.segment_infos[filetype]['available_segment_infos'].append(chk_infos) + chk_infos.update({"segment_nr": fh.filename_info["segment"] - 1}) + self.segment_infos[filetype]["available_segment_infos"].append(chk_infos) def _initialise_segment_infos(self, filetype): # initialise the segment info for this filetype filetype_fhs_sample = self.file_handlers[filetype][0] - exp_segment_nr = filetype_fhs_sample.filetype_info['expected_segments'] + exp_segment_nr = filetype_fhs_sample.filetype_info["expected_segments"] grid_width_to_grid_type = _get_grid_width_to_grid_type(filetype_fhs_sample.get_segment_position_info()) - self.segment_infos.update({filetype: {'available_segment_infos': [], - 'expected_segments': exp_segment_nr, - 'grid_width_to_grid_type': grid_width_to_grid_type}}) + self.segment_infos.update({filetype: {"available_segment_infos": [], + "expected_segments": exp_segment_nr, + "grid_width_to_grid_type": grid_width_to_grid_type}}) def _get_empty_segment(self, dim=None, idx=None, filetype=None): grid_width = self.empty_segment.shape[1] @@ -1416,7 +1416,7 @@ def _get_empty_segment(self, dim=None, idx=None, filetype=None): def _segment_heights(self, filetype, grid_width): """Compute optimal padded segment heights (in number of pixels) based on the location of available segments.""" self._extract_segment_location_dicts(filetype) - grid_type = self.segment_infos[filetype]['grid_width_to_grid_type'][grid_width] + grid_type = self.segment_infos[filetype]["grid_width_to_grid_type"][grid_width] segment_heights = _compute_optimal_missing_segment_heights(self.segment_infos[filetype], grid_type, grid_width) return segment_heights @@ -1434,18 +1434,18 @@ def _get_new_areadef_heights(self, previous_area, previous_seg_size, segment_n=N def _get_grid_width_to_grid_type(seg_info): grid_width_to_grid_type = dict() for grid_type, grid_type_seg_info in seg_info.items(): - grid_width_to_grid_type.update({grid_type_seg_info['grid_width']: grid_type}) + grid_width_to_grid_type.update({grid_type_seg_info["grid_width"]: grid_type}) return grid_width_to_grid_type def _compute_optimal_missing_segment_heights(seg_infos, grid_type, expected_vertical_size): # initialise positioning arrays segment_start_rows, segment_end_rows, segment_heights = _init_positioning_arrays_for_variable_padding( - seg_infos['available_segment_infos'], grid_type, seg_infos['expected_segments']) + seg_infos["available_segment_infos"], grid_type, seg_infos["expected_segments"]) # populate start row of first segment and end row of last segment with known values segment_start_rows[0] = 1 - 
segment_end_rows[seg_infos['expected_segments'] - 1] = expected_vertical_size + segment_end_rows[seg_infos["expected_segments"] - 1] = expected_vertical_size # find missing segments and group contiguous missing segments together missing_segments = np.where(segment_heights == 0)[0] @@ -1454,7 +1454,7 @@ def _compute_optimal_missing_segment_heights(seg_infos, grid_type, expected_vert for group in groups_missing_segments: _compute_positioning_data_for_missing_group(segment_start_rows, segment_end_rows, segment_heights, group) - return segment_heights.astype('int') + return segment_heights.astype("int") def _compute_positioning_data_for_missing_group(segment_start_rows, segment_end_rows, segment_heights, group): @@ -1513,20 +1513,20 @@ def _init_positioning_arrays_for_variable_padding(chk_infos, grid_type, exp_segm def _populate_positioning_arrays_with_available_segment_info(chk_infos, grid_type, segment_start_rows, segment_end_rows, segment_heights): for chk_info in chk_infos: - current_fh_segment_nr = chk_info['segment_nr'] - segment_heights[current_fh_segment_nr] = chk_info[grid_type]['segment_height'] - segment_start_rows[current_fh_segment_nr] = chk_info[grid_type]['start_position_row'] - segment_end_rows[current_fh_segment_nr] = chk_info[grid_type]['end_position_row'] + current_fh_segment_nr = chk_info["segment_nr"] + segment_heights[current_fh_segment_nr] = chk_info[grid_type]["segment_height"] + segment_start_rows[current_fh_segment_nr] = chk_info[grid_type]["start_position_row"] + segment_end_rows[current_fh_segment_nr] = chk_info[grid_type]["end_position_row"] def split_integer_in_most_equal_parts(x, n): """Split an integer number x in n parts that are as equally-sizes as possible.""" if x % n == 0: - return np.repeat(x // n, n).astype('int') + return np.repeat(x // n, n).astype("int") else: # split the remainder amount over the last remainder parts remainder = int(x % n) mod = int(x // n) ar = np.repeat(mod, n) ar[-remainder:] = mod + 1 - return ar.astype('int') + return ar.astype("int") diff --git a/satpy/resample.py b/satpy/resample.py index d011b20aa2..c8ed073ae5 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -164,16 +164,16 @@ CHUNK_SIZE = get_legacy_chunk_size() CACHE_SIZE = 10 -NN_COORDINATES = {'valid_input_index': ('y1', 'x1'), - 'valid_output_index': ('y2', 'x2'), - 'index_array': ('y2', 'x2', 'z2')} -BIL_COORDINATES = {'bilinear_s': ('x1', ), - 'bilinear_t': ('x1', ), - 'slices_x': ('x1', 'n'), - 'slices_y': ('x1', 'n'), - 'mask_slices': ('x1', 'n'), - 'out_coords_x': ('x2', ), - 'out_coords_y': ('y2', )} +NN_COORDINATES = {"valid_input_index": ("y1", "x1"), + "valid_output_index": ("y2", "x2"), + "index_array": ("y2", "x2", "z2")} +BIL_COORDINATES = {"bilinear_s": ("x1", ), + "bilinear_t": ("x1", ), + "slices_x": ("x1", "n"), + "slices_y": ("x1", "n"), + "mask_slices": ("x1", "n"), + "out_coords_x": ("x2", ), + "out_coords_y": ("y2", )} resamplers_cache: "WeakValueDictionary[tuple, object]" = WeakValueDictionary() @@ -184,7 +184,7 @@ def hash_dict(the_dict, the_hash=None): """Calculate a hash for a dictionary.""" if the_hash is None: the_hash = hashlib.sha1() # nosec - the_hash.update(json.dumps(the_dict, sort_keys=True).encode('utf-8')) + the_hash.update(json.dumps(the_dict, sort_keys=True).encode("utf-8")) return the_hash @@ -193,11 +193,11 @@ def get_area_file(): The files are to be named `areas.yaml` or `areas.def`. 
""" - paths = config_search_paths('areas.yaml') + paths = config_search_paths("areas.yaml") if paths: return paths else: - return get_config_path('areas.def') + return get_config_path("areas.def") def get_area_def(area_name): @@ -229,13 +229,13 @@ def add_xy_coords(data_arr, area, crs=None): Returns (xarray.DataArray): Updated DataArray object """ - if 'x' in data_arr.coords and 'y' in data_arr.coords: + if "x" in data_arr.coords and "y" in data_arr.coords: # x/y coords already provided return data_arr - if 'x' not in data_arr.dims or 'y' not in data_arr.dims: + if "x" not in data_arr.dims or "y" not in data_arr.dims: # no defined x and y dimensions return data_arr - if not hasattr(area, 'get_proj_vectors'): + if not hasattr(area, "get_proj_vectors"): return data_arr x, y = area.get_proj_vectors() @@ -245,15 +245,15 @@ def add_xy_coords(data_arr, area, crs=None): if crs is not None: units = crs.axis_info[0].unit_name # fix udunits/CF standard units - units = units.replace('metre', 'meter') - if units == 'degree': - y_attrs['units'] = 'degrees_north' - x_attrs['units'] = 'degrees_east' + units = units.replace("metre", "meter") + if units == "degree": + y_attrs["units"] = "degrees_north" + x_attrs["units"] = "degrees_east" else: - y_attrs['units'] = units - x_attrs['units'] = units - y = xr.DataArray(y, dims=('y',), attrs=y_attrs) - x = xr.DataArray(x, dims=('x',), attrs=x_attrs) + y_attrs["units"] = units + x_attrs["units"] = units + y = xr.DataArray(y, dims=("y",), attrs=y_attrs) + x = xr.DataArray(x, dims=("x",), attrs=x_attrs) return data_arr.assign_coords(y=y, x=x) @@ -283,10 +283,10 @@ def add_crs_xy_coords(data_arr, area): # default lat/lon projection latlon_proj = "+proj=latlong +datum=WGS84 +ellps=WGS84" # otherwise get it from the area definition - if hasattr(area, 'crs'): + if hasattr(area, "crs"): crs = area.crs else: - proj_str = getattr(area, 'proj_str', latlon_proj) + proj_str = getattr(area, "proj_str", latlon_proj) crs = CRS.from_string(proj_str) data_arr = data_arr.assign_coords(crs=crs) @@ -299,12 +299,12 @@ def add_crs_xy_coords(data_arr, area): # array). lons = area.lons lats = area.lats - lons.attrs.setdefault('standard_name', 'longitude') - lons.attrs.setdefault('long_name', 'longitude') - lons.attrs.setdefault('units', 'degrees_east') - lats.attrs.setdefault('standard_name', 'latitude') - lats.attrs.setdefault('long_name', 'latitude') - lats.attrs.setdefault('units', 'degrees_north') + lons.attrs.setdefault("standard_name", "longitude") + lons.attrs.setdefault("long_name", "longitude") + lons.attrs.setdefault("units", "degrees_east") + lats.attrs.setdefault("standard_name", "latitude") + lats.attrs.setdefault("long_name", "latitude") + lats.attrs.setdefault("units", "degrees_north") # See https://github.com/pydata/xarray/issues/3068 # data_arr = data_arr.assign_coords(longitude=lons, latitude=lats) else: @@ -327,7 +327,7 @@ def update_resampled_coords(old_data, new_data, new_area): # this *MUST* happen before we set 'crs' below otherwise any 'crs' # coordinate in the coordinate variables we are copying will overwrite the # 'crs' coordinate we just assigned to the data - ignore_coords = ('y', 'x', 'crs') + ignore_coords = ("y", "x", "crs") new_coords = {} for cname, cval in old_data.coords.items(): # we don't want coordinates that depended on the old x/y dimensions @@ -387,7 +387,7 @@ def precompute(self, mask=None, radius_of_influence=None, epsilon=0, "masked pixels. 
Will not cache results.") cache_dir = None - if radius_of_influence is None and not hasattr(self.source_geo_def, 'geocentric_resolution'): + if radius_of_influence is None and not hasattr(self.source_geo_def, "geocentric_resolution"): radius_of_influence = self._adjust_radius_of_influence(radius_of_influence) kwargs = dict(source_geo_def=self.source_geo_def, @@ -438,7 +438,7 @@ def _apply_cached_index(self, val, idx_name, persist=False): def load_neighbour_info(self, cache_dir, mask=None, **kwargs): """Read index arrays from either the in-memory or disk cache.""" - mask_name = getattr(mask, 'name', None) + mask_name = getattr(mask, "name", None) cached = {} for idx_name in NN_COORDINATES: if mask_name in self._index_caches: @@ -447,11 +447,11 @@ def load_neighbour_info(self, cache_dir, mask=None, **kwargs): elif cache_dir: try: filename = self._create_cache_filename( - cache_dir, prefix='nn_lut-', + cache_dir, prefix="nn_lut-", mask=mask_name, **kwargs) - fid = zarr.open(filename, 'r') + fid = zarr.open(filename, "r") cache = np.array(fid[idx_name]) - if idx_name == 'valid_input_index': + if idx_name == "valid_input_index": # valid input index array needs to be boolean cache = cache.astype(bool) except ValueError: @@ -465,11 +465,11 @@ def load_neighbour_info(self, cache_dir, mask=None, **kwargs): def save_neighbour_info(self, cache_dir, mask=None, **kwargs): """Cache resampler's index arrays if there is a cache dir.""" if cache_dir: - mask_name = getattr(mask, 'name', None) + mask_name = getattr(mask, "name", None) cache = self._read_resampler_attrs() filename = self._create_cache_filename( - cache_dir, prefix='nn_lut-', mask=mask_name, **kwargs) - LOG.info('Saving kd_tree neighbour info to %s', filename) + cache_dir, prefix="nn_lut-", mask=mask_name, **kwargs) + LOG.info("Saving kd_tree neighbour info to %s", filename) zarr_out = xr.Dataset() for idx_name, coord in NN_COORDINATES.items(): # update the cache in place with persisted dask arrays @@ -554,7 +554,7 @@ def load_bil_info(self, cache_dir, **kwargs): """Load bilinear resampling info from cache directory.""" if cache_dir: filename = self._create_cache_filename(cache_dir, - prefix='bil_lut-', + prefix="bil_lut-", **kwargs) try: self.resampler.load_resampling_info(filename) @@ -572,12 +572,12 @@ def save_bil_info(self, cache_dir, **kwargs): """Save bilinear resampling info to cache directory.""" if cache_dir: filename = self._create_cache_filename(cache_dir, - prefix='bil_lut-', + prefix="bil_lut-", **kwargs) # There are some old caches, move them out of the way if os.path.exists(filename): _move_existing_caches(cache_dir, filename) - LOG.info('Saving BIL neighbour info to %s', filename) + LOG.info("Saving BIL neighbour info to %s", filename) try: self.resampler.save_resampling_info(filename) except AttributeError: @@ -592,7 +592,7 @@ def compute(self, data, fill_value=None, **kwargs): del kwargs if fill_value is None: - fill_value = data.attrs.get('_FillValue') + fill_value = data.attrs.get("_FillValue") target_shape = self.target_geo_def.shape res = self.resampler.get_sample_from_bil_info(data, @@ -606,7 +606,7 @@ def _move_existing_caches(cache_dir, filename): """Move existing cache files out of the way.""" import os import shutil - old_cache_dir = os.path.join(cache_dir, 'moved_by_satpy') + old_cache_dir = os.path.join(cache_dir, "moved_by_satpy") try: os.makedirs(old_cache_dir) except FileExistsError: @@ -631,7 +631,7 @@ def _mean(data, y_size, x_size): def _repeat_by_factor(data, block_info=None): if block_info is None: return 
data - out_shape = block_info[None]['chunk-shape'] + out_shape = block_info[None]["chunk-shape"] out_data = data for axis, axis_size in enumerate(out_shape): in_size = data.shape[axis] @@ -689,15 +689,15 @@ def compute(self, data, expand=True, **kwargs): target_geo_def = self.target_geo_def # convert xarray backed with numpy array to dask array - if 'x' not in data.dims or 'y' not in data.dims: + if "x" not in data.dims or "y" not in data.dims: if data.ndim not in [2, 3]: raise ValueError("Can only handle 2D or 3D arrays without dimensions.") # assume rows is the second to last axis y_axis = data.ndim - 2 x_axis = data.ndim - 1 else: - y_axis = data.dims.index('y') - x_axis = data.dims.index('x') + y_axis = data.dims.index("y") + x_axis = data.dims.index("x") out_shape = target_geo_def.shape in_shape = data.shape @@ -778,24 +778,24 @@ def _get_arg_to_pass_for_skipna_handling(**kwargs): # FIXME this can be removed once Pyresample 1.18.0 is a Satpy requirement if PR_USE_SKIPNA: - if 'mask_all_nan' in kwargs: + if "mask_all_nan" in kwargs: warnings.warn( - 'Argument mask_all_nan is deprecated. Please use skipna for missing values handling. ' - 'Continuing with default skipna=True, if not provided differently.', + "Argument mask_all_nan is deprecated. Please use skipna for missing values handling. " + "Continuing with default skipna=True, if not provided differently.", DeprecationWarning, stacklevel=3 ) - kwargs.pop('mask_all_nan') + kwargs.pop("mask_all_nan") else: - if 'mask_all_nan' in kwargs: + if "mask_all_nan" in kwargs: warnings.warn( - 'Argument mask_all_nan is deprecated.' - 'Please update Pyresample and use skipna for missing values handling.', + "Argument mask_all_nan is deprecated." + "Please update Pyresample and use skipna for missing values handling.", DeprecationWarning, stacklevel=3 ) - kwargs.setdefault('mask_all_nan', False) - kwargs.pop('skipna') + kwargs.setdefault("mask_all_nan", False) + kwargs.pop("skipna") return kwargs @@ -832,32 +832,32 @@ def resample(self, data, **kwargs): Returns (xarray.DataArray): Data resampled to the target area """ - if not PR_USE_SKIPNA and 'skipna' in kwargs: - raise ValueError('You are trying to set the skipna argument but you are using an old version of' - ' Pyresample that does not support it.' - 'Please update Pyresample to 1.18.0 or higher to be able to use this argument.') + if not PR_USE_SKIPNA and "skipna" in kwargs: + raise ValueError("You are trying to set the skipna argument but you are using an old version of" + " Pyresample that does not support it." 
+ "Please update Pyresample to 1.18.0 or higher to be able to use this argument.") self.precompute(**kwargs) attrs = data.attrs.copy() data_arr = data.data - if data.ndim == 3 and data.dims[0] == 'bands': - dims = ('bands', 'y', 'x') + if data.ndim == 3 and data.dims[0] == "bands": + dims = ("bands", "y", "x") # Both one and two dimensional input data results in 2D output elif data.ndim in (1, 2): - dims = ('y', 'x') + dims = ("y", "x") else: dims = data.dims - LOG.debug("Resampling %s", str(data.attrs.get('_satpy_id', 'unknown'))) + LOG.debug("Resampling %s", str(data.attrs.get("_satpy_id", "unknown"))) result = self.compute(data_arr, **kwargs) coords = {} - if 'bands' in data.coords: - coords['bands'] = data.coords['bands'] + if "bands" in data.coords: + coords["bands"] = data.coords["bands"] # Fractions are returned in a dict elif isinstance(result, dict): - coords['categories'] = sorted(result.keys()) - dims = ('categories', 'y', 'x') + coords["categories"] = sorted(result.keys()) + dims = ("categories", "y", "x") new_result = [] - for cat in coords['categories']: + for cat in coords["categories"]: new_result.append(result[cat]) result = da.stack(new_result) if result.ndim > len(dims): @@ -865,13 +865,13 @@ def resample(self, data, **kwargs): # Adjust some attributes if "BucketFraction" in str(self): - attrs['units'] = '' - attrs['calibration'] = '' - attrs['standard_name'] = 'area_fraction' + attrs["units"] = "" + attrs["calibration"] = "" + attrs["standard_name"] = "area_fraction" elif "BucketCount" in str(self): - attrs['units'] = '' - attrs['calibration'] = '' - attrs['standard_name'] = 'number_of_observations' + attrs["units"] = "" + attrs["calibration"] = "" + attrs["standard_name"] = "number_of_observations" result = xr.DataArray(result, dims=dims, coords=coords, attrs=attrs) @@ -1024,7 +1024,7 @@ def prepare_resampler(source_area, destination_area, resampler=None, **resample_ """Instantiate and return a resampler.""" if resampler is None: LOG.info("Using default KDTree resampler") - resampler = 'kd_tree' + resampler = "kd_tree" if isinstance(resampler, PRBaseResampler): raise ValueError("Trying to create a resampler when one already " @@ -1034,7 +1034,7 @@ def prepare_resampler(source_area, destination_area, resampler=None, **resample_ if resampler_class is None: if resampler == "gradient_search": warnings.warn( - 'Gradient search resampler not available. Maybe missing `shapely`?', + "Gradient search resampler not available. 
Maybe missing `shapely`?", stacklevel=2 ) raise KeyError("Resampler '%s' not available" % resampler) @@ -1075,7 +1075,7 @@ def resample(source_area, data, destination_area, def get_fill_value(dataset): """Get the fill value of the *dataset*, defaulting to np.nan.""" if np.issubdtype(dataset.dtype, np.integer): - return dataset.attrs.get('_FillValue', np.nan) + return dataset.attrs.get("_FillValue", np.nan) return np.nan @@ -1099,11 +1099,11 @@ def resample_dataset(dataset, destination_area, **kwargs): source_area = dataset.attrs["area"] except KeyError: LOG.info("Cannot reproject dataset %s, missing area info", - dataset.attrs['name']) + dataset.attrs["name"]) return dataset - fill_value = kwargs.pop('fill_value', get_fill_value(dataset)) + fill_value = kwargs.pop("fill_value", get_fill_value(dataset)) new_data = resample(source_area, dataset, destination_area, fill_value=fill_value, **kwargs) new_attrs = new_data.attrs new_data.attrs = dataset.attrs.copy() diff --git a/satpy/scene.py b/satpy/scene.py index 733cacbfe7..d96c81a0e4 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -141,7 +141,7 @@ def __init__(self, filenames=None, reader=None, filter_parameters=None, cleaned_reader_kwargs = {} else: cleaned_reader_kwargs = cleaned_reader_kwargs.copy() - cleaned_reader_kwargs.setdefault('filter_parameters', {}).update(filter_parameters) + cleaned_reader_kwargs.setdefault("filter_parameters", {}).update(filter_parameters) if filenames and isinstance(filenames, str): raise ValueError("'filenames' must be a list of files: Scene(filenames=[filename])") @@ -163,7 +163,7 @@ def wishlist(self): return self._wishlist.copy() def _ipython_key_completions_(self): - return [x['name'] for x in self._datasets.keys()] + return [x["name"] for x in self._datasets.keys()] def _create_reader_instances(self, filenames=None, @@ -210,10 +210,10 @@ def start_time(self): will be consulted. """ - start_times = [data_arr.attrs['start_time'] for data_arr in self.values() - if 'start_time' in data_arr.attrs] + start_times = [data_arr.attrs["start_time"] for data_arr in self.values() + if "start_time" in data_arr.attrs] if not start_times: - start_times = self._reader_times('start_time') + start_times = self._reader_times("start_time") if not start_times: return None return min(start_times) @@ -227,10 +227,10 @@ def end_time(self): :attr:`Scene.start_time` is returned. 
""" - end_times = [data_arr.attrs['end_time'] for data_arr in self.values() - if 'end_time' in data_arr.attrs] + end_times = [data_arr.attrs["end_time"] for data_arr in self.values() + if "end_time" in data_arr.attrs] if not end_times: - end_times = self._reader_times('end_time') + end_times = self._reader_times("end_time") if not end_times: return self.start_time return max(end_times) @@ -309,7 +309,7 @@ def _gather_all_areas(self, datasets): continue elif not isinstance(ds, DataArray): ds = self[ds] - area = ds.attrs.get('area') + area = ds.attrs.get("area") areas.append(area) areas = [x for x in areas if x is not None] if not areas: @@ -439,7 +439,7 @@ def available_dataset_names(self, reader_name=None, composites=False): Returns: list of available dataset names """ - return sorted(set(x['name'] for x in self.available_dataset_ids( + return sorted(set(x["name"] for x in self.available_dataset_ids( reader_name=reader_name, composites=composites))) def all_dataset_ids(self, reader_name=None, composites=False): @@ -495,7 +495,7 @@ def all_dataset_names(self, reader_name=None, composites=False): Returns: list of all dataset names """ - return sorted(set(x['name'] for x in self.all_dataset_ids( + return sorted(set(x["name"] for x in self.all_dataset_ids( reader_name=reader_name, composites=composites))) def _check_known_composites(self, available_only=False): @@ -508,7 +508,7 @@ def _check_known_composites(self, available_only=False): dep_tree = DependencyTree(self._readers, sensor_comps, mods, available_only=available_only) # ignore inline compositor dependencies starting with '_' comps = (comp for comp_dict in sensor_comps.values() - for comp in comp_dict.keys() if not comp['name'].startswith('_')) + for comp in comp_dict.keys() if not comp["name"].startswith("_")) # make sure that these composites are even create-able by these readers all_comps = set(comps) # find_dependencies will update the all_comps set with DataIDs @@ -526,7 +526,7 @@ def available_composite_ids(self): def available_composite_names(self): """Names of all configured composites known to this Scene.""" - return sorted(set(x['name'] for x in self.available_composite_ids())) + return sorted(set(x["name"] for x in self.available_composite_ids())) def all_composite_ids(self): """Get all IDs for configured composites.""" @@ -534,7 +534,7 @@ def all_composite_ids(self): def all_composite_names(self): """Get all names for all configured composites.""" - return sorted(set(x['name'] for x in self.all_composite_ids())) + return sorted(set(x["name"] for x in self.all_composite_ids())) def all_modifier_names(self): """Get names of configured modifier objects.""" @@ -557,7 +557,7 @@ def iter_by_area(self): """ datasets_by_area = {} for ds in self: - a = ds.attrs.get('area') + a = ds.attrs.get("area") dsid = DataID.from_dataarray(ds) datasets_by_area.setdefault(a, []).append(dsid) @@ -597,14 +597,14 @@ def copy(self, datasets=None): @property def all_same_area(self): """All contained data arrays are on the same area.""" - all_areas = [x.attrs.get('area', None) for x in self.values()] + all_areas = [x.attrs.get("area", None) for x in self.values()] all_areas = [x for x in all_areas if x is not None] return all(all_areas[0] == x for x in all_areas[1:]) @property def all_same_proj(self): """All contained data array are in the same projection.""" - all_areas = [x.attrs.get('area', None) for x in self.values()] + all_areas = [x.attrs.get("area", None) for x in self.values()] all_areas = [x for x in all_areas if x is not None] return 
all(all_areas[0].crs == x.crs for x in all_areas[1:]) @@ -614,11 +614,11 @@ def _slice_area_from_bbox(src_area, dst_area, ll_bbox=None, """Slice the provided area using the bounds provided.""" if ll_bbox is not None: dst_area = AreaDefinition( - 'crop_area', 'crop_area', 'crop_latlong', - {'proj': 'latlong'}, 100, 100, ll_bbox) + "crop_area", "crop_area", "crop_latlong", + {"proj": "latlong"}, 100, 100, ll_bbox) elif xy_bbox is not None: dst_area = AreaDefinition( - 'crop_area', 'crop_area', 'crop_xy', + "crop_area", "crop_area", "crop_xy", src_area.crs, src_area.width, src_area.height, xy_bbox) x_slice, y_slice = src_area.get_area_slices(dst_area) @@ -638,7 +638,7 @@ def _slice_datasets(self, dataset_ids, slice_key, new_area, area_only=True): if ds_id in new_datasets: replace_anc(ds, pres) continue - if area_only and ds.attrs.get('area') is None: + if area_only and ds.attrs.get("area") is None: new_datasets[ds_id] = ds replace_anc(ds, pres) continue @@ -650,7 +650,7 @@ def _slice_datasets(self, dataset_ids, slice_key, new_area, area_only=True): key = slice_key new_ds = ds.isel(**key) if new_area is not None: - new_ds.attrs['area'] = new_area + new_ds.attrs["area"] = new_area new_datasets[ds_id] = new_ds if parent_ds is None: @@ -679,7 +679,7 @@ def slice(self, key): if area is not None: # assume dimensions for area are y and x one_ds = self[dataset_ids[0]] - area_key = tuple(sl for dim, sl in zip(one_ds.dims, key) if dim in ['y', 'x']) + area_key = tuple(sl for dim, sl in zip(one_ds.dims, key) if dim in ["y", "x"]) new_area = area[area_key] else: new_area = None @@ -759,7 +759,7 @@ def crop(self, area=None, ll_bbox=None, xy_bbox=None, dataset_ids=None): x_slice = slice(min_x_slice.start * x_factor, min_x_slice.stop * x_factor) new_area = src_area[y_slice, x_slice] - slice_key = {'y': y_slice, 'x': x_slice} + slice_key = {"y": y_slice, "x": x_slice} new_scn._slice_datasets(dataset_ids, slice_key, new_area) else: new_target_areas[src_area] = self._slice_area_from_bbox( @@ -768,7 +768,7 @@ def crop(self, area=None, ll_bbox=None, xy_bbox=None, dataset_ids=None): return new_scn - def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean', **dim_kwargs): + def aggregate(self, dataset_ids=None, boundary="trim", side="left", func="mean", **dim_kwargs): """Create an aggregated version of the Scene. 
Args: @@ -785,7 +785,7 @@ def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean', Returns: A new aggregated scene - See also: + See Also: xarray.DataArray.coarsen Example: @@ -810,8 +810,8 @@ def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean', side=side, **dim_kwargs) new_scn._datasets[ds_id].attrs = self[ds_id].attrs.copy() - new_scn._datasets[ds_id].attrs['area'] = target_area - new_scn._datasets[ds_id].attrs['resolution'] = resolution + new_scn._datasets[ds_id].attrs["area"] = target_area + new_scn._datasets[ds_id].attrs["resolution"] = resolution return new_scn def get(self, key, default=None): @@ -846,11 +846,11 @@ def _slice_data(self, source_area, slices, dataset): """Slice the data to reduce it.""" slice_x, slice_y = slices dataset = dataset.isel(x=slice_x, y=slice_y) - if ('x', source_area.width) not in dataset.sizes.items(): + if ("x", source_area.width) not in dataset.sizes.items(): raise RuntimeError - if ('y', source_area.height) not in dataset.sizes.items(): + if ("y", source_area.height) not in dataset.sizes.items(): raise RuntimeError - dataset.attrs['area'] = source_area + dataset.attrs["area"] = source_area return dataset @@ -877,19 +877,19 @@ def _resampled_scene(self, new_scn, destination_area, reduce_data=True, if ds_id in new_scn._datasets: new_scn._datasets[ds_id] = new_datasets[ds_id] continue - if dataset.attrs.get('area') is None: + if dataset.attrs.get("area") is None: if parent_dataset is None: new_scn._datasets[ds_id] = dataset else: replace_anc(dataset, pres) continue LOG.debug("Resampling %s", ds_id) - source_area = dataset.attrs['area'] + source_area = dataset.attrs["area"] dataset, source_area = self._reduce_data(dataset, source_area, destination_area, reduce_data, reductions, resample_kwargs) self._prepare_resampler(source_area, destination_area, resamplers, resample_kwargs) kwargs = resample_kwargs.copy() - kwargs['resampler'] = resamplers[source_area] + kwargs["resampler"] = resamplers[source_area] res = resample_dataset(dataset, destination_area, **kwargs) new_datasets[ds_id] = res if ds_id in new_scn._datasets: @@ -900,7 +900,7 @@ def _resampled_scene(self, new_scn, destination_area, reduce_data=True, def _get_finalized_destination_area(self, destination_area, new_scn): if isinstance(destination_area, str): destination_area = get_area_def(destination_area) - if hasattr(destination_area, 'freeze'): + if hasattr(destination_area, "freeze"): try: finest_area = new_scn.finest_area() destination_area = destination_area.freeze(finest_area) @@ -923,8 +923,8 @@ def _reduce_data(self, dataset, source_area, destination_area, reduce_data, redu try: (slice_x, slice_y), source_area = reductions[key] except KeyError: - if resample_kwargs.get('resampler') == 'gradient_search': - factor = resample_kwargs.get('shape_divisible_by', 2) + if resample_kwargs.get("resampler") == "gradient_search": + factor = resample_kwargs.get("shape_divisible_by", 2) else: factor = None try: @@ -1051,7 +1051,7 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami # by default select first data variable as display variable vdims = ds.data_vars[list(ds.data_vars.keys())[0]].name - if hasattr(ds, "area") and hasattr(ds.area, 'to_cartopy_crs'): + if hasattr(ds, "area") and hasattr(ds.area, "to_cartopy_crs"): dscrs = ds.area.to_cartopy_crs() gvds = gv.Dataset(ds, crs=dscrs) else: @@ -1083,17 +1083,17 @@ def to_xarray_dataset(self, datasets=None): if len(dataarrays) == 0: return xr.Dataset() - ds_dict = 
{i.attrs['name']: i.rename(i.attrs['name']) for i in dataarrays if i.attrs.get('area') is not None} + ds_dict = {i.attrs["name"]: i.rename(i.attrs["name"]) for i in dataarrays if i.attrs.get("area") is not None} mdata = combine_metadata(*tuple(i.attrs for i in dataarrays)) - if mdata.get('area') is None or not isinstance(mdata['area'], SwathDefinition): + if mdata.get("area") is None or not isinstance(mdata["area"], SwathDefinition): # either don't know what the area is or we have an AreaDefinition ds = xr.merge(ds_dict.values()) else: # we have a swath definition and should use lon/lat values - lons, lats = mdata['area'].get_lonlats() + lons, lats = mdata["area"].get_lonlats() if not isinstance(lons, DataArray): - lons = DataArray(lons, dims=('y', 'x')) - lats = DataArray(lats, dims=('y', 'x')) + lons = DataArray(lons, dims=("y", "x")) + lats = DataArray(lats, dims=("y", "x")) ds = xr.Dataset(ds_dict, coords={"latitude": lats, "longitude": lons}) @@ -1109,7 +1109,7 @@ def to_xarray(self, include_lonlats=True, epoch=None, include_orig_name=True, - numeric_name_prefix='CHANNEL_'): + numeric_name_prefix="CHANNEL_"): """Merge all xr.DataArray(s) of a satpy.Scene to a CF-compliant xarray object. If all Scene DataArrays are on the same area, it returns an xr.Dataset. @@ -1146,7 +1146,7 @@ def to_xarray(self, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. - Returns + Returns: ------- ds, xr.Dataset A CF-compliant xr.Dataset @@ -1211,7 +1211,7 @@ def save_dataset(self, dataset_id, filename=None, writer=None, """ if writer is None and filename is None: - writer = 'geotiff' + writer = "geotiff" elif writer is None: writer = self._get_writer_by_ext(os.path.splitext(filename)[1]) @@ -1274,7 +1274,7 @@ def save_datasets(self, writer=None, filename=None, datasets=None, compute=True, "dimensions (eg. through resampling).") if writer is None: if filename is None: - writer = 'geotiff' + writer = "geotiff" else: writer = self._get_writer_by_ext(os.path.splitext(filename)[1]) writer, save_kwargs = load_writer(writer, @@ -1346,7 +1346,7 @@ def _get_writer_by_ext(extension): """ mapping = {".tiff": "geotiff", ".tif": "geotiff", ".nc": "cf", ".mitiff": "mitiff"} - return mapping.get(extension.lower(), 'simple_image') + return mapping.get(extension.lower(), "simple_image") def _remove_failed_datasets(self, keepables): """Remove the datasets that we couldn't create.""" @@ -1384,8 +1384,8 @@ def unload(self, keepables=None): LOG.debug("Unloading dataset: %r", ds_id) del self._datasets[ds_id] - def load(self, wishlist, calibration='*', resolution='*', - polarization='*', level='*', modifiers='*', generate=True, unload=True, + def load(self, wishlist, calibration="*", resolution="*", + polarization="*", level="*", modifiers="*", generate=True, unload=True, **kwargs): """Read and generate requested datasets. 
diff --git a/satpy/tests/compositor_tests/test_abi.py b/satpy/tests/compositor_tests/test_abi.py index 93df810cf5..7c29a12c63 100644 --- a/satpy/tests/compositor_tests/test_abi.py +++ b/satpy/tests/compositor_tests/test_abi.py @@ -26,7 +26,7 @@ class TestABIComposites(unittest.TestCase): def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['abi']) + load_compositor_configs_for_sensors(["abi"]) def test_simulated_green(self): """Test creating a fake 'green' band.""" @@ -39,28 +39,27 @@ def test_simulated_green(self): rows = 5 cols = 10 area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) - comp = SimulatedGreen('green', prerequisites=('C01', 'C02', 'C03'), - standard_name='toa_bidirectional_reflectance') + comp = SimulatedGreen("green", prerequisites=("C01", "C02", "C03"), + standard_name="toa_bidirectional_reflectance") c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.25, - dims=('y', 'x'), - attrs={'name': 'C01', 'area': area}) + dims=("y", "x"), + attrs={"name": "C01", "area": area}) c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, - dims=('y', 'x'), - attrs={'name': 'C02', 'area': area}) + dims=("y", "x"), + attrs={"name": "C02", "area": area}) c03 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.35, - dims=('y', 'x'), - attrs={'name': 'C03', 'area': area}) + dims=("y", "x"), + attrs={"name": "C03", "area": area}) res = comp((c01, c02, c03)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs['name'], 'green') - self.assertEqual(res.attrs['standard_name'], - 'toa_bidirectional_reflectance') + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "green" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.28025) diff --git a/satpy/tests/compositor_tests/test_agri.py b/satpy/tests/compositor_tests/test_agri.py index 32fcc72c61..b477dc53cb 100644 --- a/satpy/tests/compositor_tests/test_agri.py +++ b/satpy/tests/compositor_tests/test_agri.py @@ -26,7 +26,7 @@ class TestAGRIComposites(unittest.TestCase): def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['agri']) + load_compositor_configs_for_sensors(["agri"]) def test_simulated_red(self): """Test creating a fake 'red' band.""" @@ -39,25 +39,24 @@ def test_simulated_red(self): rows = 5 cols = 10 area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) - comp = SimulatedRed('red', prerequisites=('C01', 'C02', 'C03'), - standard_name='toa_bidirectional_reflectance') + comp = SimulatedRed("red", prerequisites=("C01", "C02", "C03"), + standard_name="toa_bidirectional_reflectance") c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.25, - dims=('y', 'x'), - attrs={'name': 'C01', 'area': area}) + dims=("y", "x"), + attrs={"name": "C01", "area": area}) c02 = 
xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, - dims=('y', 'x'), - attrs={'name': 'C02', 'area': area}) + dims=("y", "x"), + attrs={"name": "C02", "area": area}) res = comp((c01, c02)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs['name'], 'red') - self.assertEqual(res.attrs['standard_name'], - 'toa_bidirectional_reflectance') + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "red" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.24252874) diff --git a/satpy/tests/compositor_tests/test_ahi.py b/satpy/tests/compositor_tests/test_ahi.py index ed485bd924..980f5a746b 100644 --- a/satpy/tests/compositor_tests/test_ahi.py +++ b/satpy/tests/compositor_tests/test_ahi.py @@ -26,4 +26,4 @@ class TestAHIComposites(unittest.TestCase): def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['ahi']) + load_compositor_configs_for_sensors(["ahi"]) diff --git a/satpy/tests/compositor_tests/test_glm.py b/satpy/tests/compositor_tests/test_glm.py index 6b79f96678..13783905da 100644 --- a/satpy/tests/compositor_tests/test_glm.py +++ b/satpy/tests/compositor_tests/test_glm.py @@ -24,7 +24,7 @@ class TestGLMComposites: def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['glm']) + load_compositor_configs_for_sensors(["glm"]) def test_highlight_compositor(self): """Test creating a highlight composite.""" @@ -37,34 +37,34 @@ def test_highlight_compositor(self): rows = 5 cols = 10 area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = HighlightCompositor( - 'c14_highlight', - prerequisites=('flash_extent_density', 'C14'), + "c14_highlight", + prerequisites=("flash_extent_density", "C14"), min_hightlight=0.0, max_hightlight=1.0, ) flash_extent_density = xr.DataArray( da.zeros((rows, cols), chunks=25) + 0.5, - dims=('y', 'x'), - attrs={'name': 'flash_extent_density', 'area': area}) + dims=("y", "x"), + attrs={"name": "flash_extent_density", "area": area}) c14_data = np.repeat(np.arange(cols, dtype=np.float64)[None, :], rows, axis=0) c14 = xr.DataArray(da.from_array(c14_data, chunks=25) + 303.15, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'name': 'C14', - 'area': area, - 'standard_name': 'toa_brightness_temperature', + "name": "C14", + "area": area, + "standard_name": "toa_brightness_temperature", }) res = comp((flash_extent_density, c14)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'c14_highlight' + assert res.attrs["name"] == "c14_highlight" data = res.compute() np.testing.assert_almost_equal(data.values.min(), -0.04) np.testing.assert_almost_equal(data.values.max(), 1.04) diff --git a/satpy/tests/compositor_tests/test_sar.py b/satpy/tests/compositor_tests/test_sar.py index ed71e22730..30c342ce44 100644 --- a/satpy/tests/compositor_tests/test_sar.py +++ b/satpy/tests/compositor_tests/test_sar.py @@ -33,25 +33,24 @@ def test_sar_ice(self): rows = 2 cols = 2 - comp = 
SARIce('sar_ice', prerequisites=('hh', 'hv'), - standard_name='sar-ice') + comp = SARIce("sar_ice", prerequisites=("hh", "hv"), + standard_name="sar-ice") hh = xr.DataArray(da.zeros((rows, cols), chunks=25) + 2000, - dims=('y', 'x'), - attrs={'name': 'hh'}) + dims=("y", "x"), + attrs={"name": "hh"}) hv = xr.DataArray(da.zeros((rows, cols), chunks=25) + 1000, - dims=('y', 'x'), - attrs={'name': 'hv'}) + dims=("y", "x"), + attrs={"name": "hv"}) res = comp((hh, hv)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs['name'], 'sar_ice') - self.assertEqual(res.attrs['standard_name'], - 'sar-ice') + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "sar_ice" + assert res.attrs["standard_name"] == "sar-ice" data = res.compute() - np.testing.assert_allclose(data.sel(bands='R'), 31.58280822) - np.testing.assert_allclose(data.sel(bands='G'), 159869.56789876) - np.testing.assert_allclose(data.sel(bands='B'), 44.68138191) + np.testing.assert_allclose(data.sel(bands="R"), 31.58280822) + np.testing.assert_allclose(data.sel(bands="G"), 159869.56789876) + np.testing.assert_allclose(data.sel(bands="B"), 44.68138191) def test_sar_ice_log(self): """Test creating a the sar_ice_log composite.""" @@ -63,22 +62,21 @@ def test_sar_ice_log(self): rows = 2 cols = 2 - comp = SARIceLog('sar_ice_log', prerequisites=('hh', 'hv'), - standard_name='sar-ice-log') + comp = SARIceLog("sar_ice_log", prerequisites=("hh", "hv"), + standard_name="sar-ice-log") hh = xr.DataArray(da.zeros((rows, cols), chunks=25) - 10, - dims=('y', 'x'), - attrs={'name': 'hh'}) + dims=("y", "x"), + attrs={"name": "hh"}) hv = xr.DataArray(da.zeros((rows, cols), chunks=25) - 20, - dims=('y', 'x'), - attrs={'name': 'hv'}) + dims=("y", "x"), + attrs={"name": "hv"}) res = comp((hh, hv)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs['name'], 'sar_ice_log') - self.assertEqual(res.attrs['standard_name'], - 'sar-ice-log') + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "sar_ice_log" + assert res.attrs["standard_name"] == "sar-ice-log" data = res.compute() - np.testing.assert_allclose(data.sel(bands='R'), -20) - np.testing.assert_allclose(data.sel(bands='G'), -4.6) - np.testing.assert_allclose(data.sel(bands='B'), -10) + np.testing.assert_allclose(data.sel(bands="R"), -20) + np.testing.assert_allclose(data.sel(bands="G"), -4.6) + np.testing.assert_allclose(data.sel(bands="B"), -10) diff --git a/satpy/tests/compositor_tests/test_spectral.py b/satpy/tests/compositor_tests/test_spectral.py index b6460c911b..36a3dd9355 100644 --- a/satpy/tests/compositor_tests/test_spectral.py +++ b/satpy/tests/compositor_tests/test_spectral.py @@ -30,50 +30,50 @@ def setup_method(self): """Initialize channels.""" rows = 5 cols = 10 - self.c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.20, dims=('y', 'x'), attrs={'name': 'C02'}) - self.c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, dims=('y', 'x'), attrs={'name': 'C03'}) - self.c03 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.40, dims=('y', 'x'), attrs={'name': 'C04'}) + self.c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.20, dims=("y", "x"), attrs={"name": "C02"}) + self.c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30, dims=("y", "x"), attrs={"name": "C03"}) + self.c03 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.40, 
dims=("y", "x"), attrs={"name": "C04"}) def test_bad_lengths(self): """Test that error is raised if the amount of channels to blend does not match the number of weights.""" - comp = SpectralBlender('blended_channel', fractions=(0.3, 0.7), prerequisites=(0.51, 0.85), - standard_name='toa_bidirectional_reflectance') - with pytest.raises(ValueError): + comp = SpectralBlender("blended_channel", fractions=(0.3, 0.7), prerequisites=(0.51, 0.85), + standard_name="toa_bidirectional_reflectance") + with pytest.raises(ValueError, match="fractions and projectables must have the same length."): comp((self.c01, self.c02, self.c03)) def test_spectral_blender(self): """Test the base class for spectral blending of channels.""" - comp = SpectralBlender('blended_channel', fractions=(0.3, 0.4, 0.3), prerequisites=(0.51, 0.65, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = SpectralBlender("blended_channel", fractions=(0.3, 0.4, 0.3), prerequisites=(0.51, 0.65, 0.85), + standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c02, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'blended_channel' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' + assert res.attrs["name"] == "blended_channel" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.3) def test_hybrid_green(self): """Test hybrid green correction of the 'green' band.""" - comp = HybridGreen('hybrid_green', fraction=0.15, prerequisites=(0.51, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = HybridGreen("hybrid_green", fraction=0.15, prerequisites=(0.51, 0.85), + standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'hybrid_green' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' + assert res.attrs["name"] == "hybrid_green" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.23) def test_green_corrector(self): """Test the deprecated class for green corrections.""" - comp = GreenCorrector('blended_channel', fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = GreenCorrector("blended_channel", fractions=(0.85, 0.15), prerequisites=(0.51, 0.85), + standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'blended_channel' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' + assert res.attrs["name"] == "blended_channel" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.compute() np.testing.assert_allclose(data, 0.23) @@ -84,36 +84,36 @@ class TestNdviHybridGreenCompositor: def setup_method(self): """Initialize channels.""" self.c01 = xr.DataArray(da.from_array([[0.25, 0.30], [0.20, 0.30]], chunks=25), - dims=('y', 'x'), attrs={'name': 'C02'}) + dims=("y", "x"), attrs={"name": "C02"}) self.c02 = xr.DataArray(da.from_array([[0.25, 0.30], [0.25, 0.35]], chunks=25), - dims=('y', 'x'), attrs={'name': 'C03'}) + dims=("y", "x"), attrs={"name": "C03"}) self.c03 = xr.DataArray(da.from_array([[0.35, 0.35], [0.28, 0.65]], chunks=25), - dims=('y', 'x'), attrs={'name': 'C04'}) + dims=("y", "x"), 
attrs={"name": "C04"}) def test_ndvi_hybrid_green(self): """Test General functionality with linear scaling from ndvi to blend fraction.""" - comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), prerequisites=(0.51, 0.65, 0.85), + standard_name="toa_bidirectional_reflectance") # Test General functionality with linear strength (=1.0) res = comp((self.c01, self.c02, self.c03)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'ndvi_hybrid_green' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' + assert res.attrs["name"] == "ndvi_hybrid_green" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" data = res.values np.testing.assert_array_almost_equal(data, np.array([[0.2633, 0.3071], [0.2115, 0.3420]]), decimal=4) def test_nonliniear_scaling(self): """Test non-linear scaling using `strength` term.""" - comp = NDVIHybridGreen('ndvi_hybrid_green', limits=(0.15, 0.05), strength=2.0, prerequisites=(0.51, 0.65, 0.85), - standard_name='toa_bidirectional_reflectance') + comp = NDVIHybridGreen("ndvi_hybrid_green", limits=(0.15, 0.05), strength=2.0, prerequisites=(0.51, 0.65, 0.85), + standard_name="toa_bidirectional_reflectance") res = comp((self.c01, self.c02, self.c03)) np.testing.assert_array_almost_equal(res.values, np.array([[0.2646, 0.3075], [0.2120, 0.3471]]), decimal=4) def test_invalid_strength(self): """Test using invalid `strength` term for non-linear scaling.""" - with pytest.raises(ValueError): - _ = NDVIHybridGreen('ndvi_hybrid_green', strength=0.0, prerequisites=(0.51, 0.65, 0.85), - standard_name='toa_bidirectional_reflectance') + with pytest.raises(ValueError, match="Expected strength greater than 0.0, got 0.0."): + _ = NDVIHybridGreen("ndvi_hybrid_green", strength=0.0, prerequisites=(0.51, 0.65, 0.85), + standard_name="toa_bidirectional_reflectance") diff --git a/satpy/tests/compositor_tests/test_viirs.py b/satpy/tests/compositor_tests/test_viirs.py index 66c5e8c426..1641e4248b 100644 --- a/satpy/tests/compositor_tests/test_viirs.py +++ b/satpy/tests/compositor_tests/test_viirs.py @@ -29,20 +29,20 @@ class TestVIIRSComposites: """Test various VIIRS-specific composites.""" - @pytest.fixture + @pytest.fixture() def area(self): """Return fake area for use with DNB tests.""" rows = 5 cols = 10 area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) return area - @pytest.fixture + @pytest.fixture() def dnb(self, area): """Return fake channel 1 data for DNB tests.""" dnb = np.zeros(area.shape) + 0.25 @@ -50,12 +50,12 @@ def dnb(self, area): dnb[4:, :] += 0.5 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, - dims=('y', 'x'), - attrs={'name': 'DNB', 'area': area, - 'start_time': datetime(2020, 1, 1, 12, 0, 0)}) + dims=("y", "x"), + attrs={"name": "DNB", "area": area, + "start_time": datetime(2020, 1, 1, 12, 0, 0)}) return c01 - @pytest.fixture + @pytest.fixture() def sza(self, area): """Return fake sza dataset for DNB tests.""" # data changes by row, sza changes by col for testing @@ -64,12 +64,12 @@ def sza(self, area): sza[:, 4:] += 45.0 sza = da.from_array(sza, chunks=25) c02 = xr.DataArray(sza, - dims=('y', 'x'), - attrs={'name': 
'solar_zenith_angle', 'area': area, - 'start_time': datetime(2020, 1, 1, 12, 0, 0)}) + dims=("y", "x"), + attrs={"name": "solar_zenith_angle", "area": area, + "start_time": datetime(2020, 1, 1, 12, 0, 0)}) return c02 - @pytest.fixture + @pytest.fixture() def lza(self, area): """Return fake lunal zenith angle dataset for DNB tests.""" lza = np.zeros(area.shape) + 70.0 @@ -77,29 +77,29 @@ def lza(self, area): lza[:, 4:] += 45.0 lza = da.from_array(lza, chunks=25) c03 = xr.DataArray(lza, - dims=('y', 'x'), - attrs={'name': 'lunar_zenith_angle', 'area': area, - 'start_time': datetime(2020, 1, 1, 12, 0, 0) + dims=("y", "x"), + attrs={"name": "lunar_zenith_angle", "area": area, + "start_time": datetime(2020, 1, 1, 12, 0, 0) }) return c03 def test_load_composite_yaml(self): """Test loading the yaml for this sensor.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['viirs']) + load_compositor_configs_for_sensors(["viirs"]) def test_histogram_dnb(self, dnb, sza): """Test the 'histogram_dnb' compositor.""" from satpy.composites.viirs import HistogramDNB - comp = HistogramDNB('histogram_dnb', prerequisites=('dnb',), - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') + comp = HistogramDNB("histogram_dnb", prerequisites=("dnb",), + standard_name="toa_outgoing_radiance_per_" + "unit_wavelength") res = comp((dnb, sza)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'histogram_dnb' - assert res.attrs['standard_name'] == 'equalized_radiance' + assert res.attrs["name"] == "histogram_dnb" + assert res.attrs["standard_name"] == "equalized_radiance" data = res.compute() unique_values = np.unique(data) np.testing.assert_allclose(unique_values, [0.5994, 0.7992, 0.999], rtol=1e-3) @@ -108,14 +108,14 @@ def test_adaptive_dnb(self, dnb, sza): """Test the 'adaptive_dnb' compositor.""" from satpy.composites.viirs import AdaptiveDNB - comp = AdaptiveDNB('adaptive_dnb', prerequisites=('dnb',), - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') + comp = AdaptiveDNB("adaptive_dnb", prerequisites=("dnb",), + standard_name="toa_outgoing_radiance_per_" + "unit_wavelength") res = comp((dnb, sza)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'adaptive_dnb' - assert res.attrs['standard_name'] == 'equalized_radiance' + assert res.attrs["name"] == "adaptive_dnb" + assert res.attrs["standard_name"] == "equalized_radiance" data = res.compute() np.testing.assert_allclose(data.data, 0.999, rtol=1e-4) @@ -123,17 +123,17 @@ def test_hncc_dnb(self, area, dnb, sza, lza): """Test the 'hncc_dnb' compositor.""" from satpy.composites.viirs import NCCZinke - comp = NCCZinke('hncc_dnb', prerequisites=('dnb',), - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') + comp = NCCZinke("hncc_dnb", prerequisites=("dnb",), + standard_name="toa_outgoing_radiance_per_" + "unit_wavelength") mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1, - dims=('y',), - attrs={'name': 'moon_illumination_fraction', 'area': area}) + dims=("y",), + attrs={"name": "moon_illumination_fraction", "area": area}) res = comp((dnb, sza, lza, mif)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'hncc_dnb' - assert res.attrs['standard_name'] == 'ncc_radiance' + assert res.attrs["name"] == "hncc_dnb" + assert res.attrs["standard_name"] == "ncc_radiance" data = res.compute() unique = 
np.unique(data) np.testing.assert_allclose( @@ -141,21 +141,21 @@ def test_hncc_dnb(self, area, dnb, sza, lza): 9.50784532e-03, 1.42617433e-02, 1.50001560e+03, 3.00001560e+03, 4.50001560e+03]) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Expected .*, got 2"): comp((dnb, sza)) def test_hncc_dnb_nomoonpha(self, area, dnb, sza, lza): """Test the 'hncc_dnb' compositor when no moon phase data is provided.""" from satpy.composites.viirs import NCCZinke - comp = NCCZinke('hncc_dnb', prerequisites=('dnb',), - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') + comp = NCCZinke("hncc_dnb", prerequisites=("dnb",), + standard_name="toa_outgoing_radiance_per_" + "unit_wavelength") res = comp((dnb, sza, lza)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'hncc_dnb' - assert res.attrs['standard_name'] == 'ncc_radiance' + assert res.attrs["name"] == "hncc_dnb" + assert res.attrs["standard_name"] == "ncc_radiance" data = res.compute() unique = np.unique(data) np.testing.assert_allclose( @@ -169,10 +169,10 @@ def test_erf_dnb(self, dnb_units, saturation_correction, area, sza, lza): """Test the 'dynamic_dnb' or ERF DNB compositor.""" from satpy.composites.viirs import ERFDNB - comp = ERFDNB('dynamic_dnb', prerequisites=('dnb',), + comp = ERFDNB("dynamic_dnb", prerequisites=("dnb",), saturation_correction=saturation_correction, - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') + standard_name="toa_outgoing_radiance_per_" + "unit_wavelength") # dnb is different from in the other tests, so don't use the fixture # here dnb = np.zeros(area.shape) + 0.25 @@ -184,16 +184,16 @@ def test_erf_dnb(self, dnb_units, saturation_correction, area, sza, lza): dnb /= 10000.0 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, - dims=('y', 'x'), - attrs={'name': 'DNB', 'area': area, 'units': dnb_units}) + dims=("y", "x"), + attrs={"name": "DNB", "area": area, "units": dnb_units}) mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1, - dims=('y',), - attrs={'name': 'moon_illumination_fraction', 'area': area}) + dims=("y",), + attrs={"name": "moon_illumination_fraction", "area": area}) res = comp((c01, sza, lza, mif)) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['name'] == 'dynamic_dnb' - assert res.attrs['standard_name'] == 'equalized_radiance' + assert res.attrs["name"] == "dynamic_dnb" + assert res.attrs["standard_name"] == "equalized_radiance" data = res.compute() unique = np.unique(data) assert np.isnan(unique).any() diff --git a/satpy/tests/conftest.py b/satpy/tests/conftest.py index 8bcbea2093..754b11ffcd 100644 --- a/satpy/tests/conftest.py +++ b/satpy/tests/conftest.py @@ -26,11 +26,11 @@ import satpy -TEST_ETC_DIR = os.path.join(os.path.dirname(__file__), 'etc') +TEST_ETC_DIR = os.path.join(os.path.dirname(__file__), "etc") @pytest.fixture(autouse=True) -def reset_satpy_config(tmpdir): +def _reset_satpy_config(tmpdir): """Set satpy config to logical defaults for tests.""" test_config = { "cache_dir": str(tmpdir / "cache"), @@ -44,13 +44,13 @@ def reset_satpy_config(tmpdir): @pytest.fixture(autouse=True) -def clear_function_caches(): +def _clear_function_caches(): """Clear out global function-level caches that may cause conflicts between tests.""" from satpy.composites.config_loader import load_compositor_configs_for_sensor load_compositor_configs_for_sensor.cache_clear() -@pytest.fixture +@pytest.fixture() def include_test_etc(): """Tell Satpy to use 
the config 'etc' directory from the tests directory.""" with satpy.config.set(config_path=[TEST_ETC_DIR]): diff --git a/satpy/tests/enhancement_tests/test_abi.py b/satpy/tests/enhancement_tests/test_abi.py index f7ebb853b4..4a878ce96c 100644 --- a/satpy/tests/enhancement_tests/test_abi.py +++ b/satpy/tests/enhancement_tests/test_abi.py @@ -30,7 +30,7 @@ class TestABIEnhancement(unittest.TestCase): def setUp(self): """Create fake data for the tests.""" data = da.linspace(0, 1, 16).reshape((4, 4)) - self.da = xr.DataArray(data, dims=('y', 'x'), attrs={'test': 'test'}) + self.da = xr.DataArray(data, dims=("y", "x"), attrs={"test": "test"}) def test_cimss_true_color_contrast(self): """Test the cimss_true_color_contrast enhancement.""" diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index e95c55a362..ca0d56f11f 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -70,12 +70,12 @@ def setup_method(self): crefl_data /= 5.605 crefl_data[0, 0] = np.nan # one bad value for testing crefl_data[0, 1] = 0. - self.ch1 = xr.DataArray(da.from_array(data, chunks=2), dims=('y', 'x'), attrs={'test': 'test'}) - self.ch2 = xr.DataArray(da.from_array(crefl_data, chunks=2), dims=('y', 'x'), attrs={'test': 'test'}) + self.ch1 = xr.DataArray(da.from_array(data, chunks=2), dims=("y", "x"), attrs={"test": "test"}) + self.ch2 = xr.DataArray(da.from_array(crefl_data, chunks=2), dims=("y", "x"), attrs={"test": "test"}) rgb_data = np.stack([data, data, data]) self.rgb = xr.DataArray(da.from_array(rgb_data, chunks=(3, 2, 2)), - dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}) + dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}) @pytest.mark.parametrize( ("decorator", "exp_call_cls"), @@ -208,20 +208,20 @@ def test_merge_colormaps(self): create_colormap_mock = mock.Mock(wraps=create_colormap) cmap1 = Colormap((1, (1., 1., 1.))) - kwargs = {'palettes': cmap1} + kwargs = {"palettes": cmap1} - with mock.patch('satpy.enhancements.create_colormap', create_colormap_mock): + with mock.patch("satpy.enhancements.create_colormap", create_colormap_mock): res = mcp(kwargs) assert res is cmap1 create_colormap_mock.assert_not_called() create_colormap_mock.reset_mock() ret_map.reset_mock() - cmap1 = {'colors': 'blues', 'min_value': 0, - 'max_value': 1} - kwargs = {'palettes': [cmap1]} - with mock.patch('satpy.enhancements.create_colormap', create_colormap_mock), \ - mock.patch('trollimage.colormap.blues', ret_map): + cmap1 = {"colors": "blues", "min_value": 0, + "max_value": 1} + kwargs = {"palettes": [cmap1]} + with mock.patch("satpy.enhancements.create_colormap", create_colormap_mock), \ + mock.patch("trollimage.colormap.blues", ret_map): _ = mcp(kwargs) create_colormap_mock.assert_called_once() ret_map.reverse.assert_not_called() @@ -229,18 +229,18 @@ def test_merge_colormaps(self): create_colormap_mock.reset_mock() ret_map.reset_mock() - cmap2 = {'colors': 'blues', 'min_value': 2, - 'max_value': 3, 'reverse': True} - kwargs = {'palettes': [cmap2]} - with mock.patch('trollimage.colormap.blues', ret_map): + cmap2 = {"colors": "blues", "min_value": 2, + "max_value": 3, "reverse": True} + kwargs = {"palettes": [cmap2]} + with mock.patch("trollimage.colormap.blues", ret_map): _ = mcp(kwargs) ret_map.reverse.assert_called_once() ret_map.set_range.assert_called_with(2, 3) create_colormap_mock.reset_mock() ret_map.reset_mock() - kwargs = {'palettes': [cmap1, cmap2]} - with 
mock.patch('trollimage.colormap.blues', ret_map): + kwargs = {"palettes": [cmap1, cmap2]} + with mock.patch("trollimage.colormap.blues", ret_map): _ = mcp(kwargs) ret_map.__add__.assert_called_once() @@ -342,7 +342,7 @@ def test_cmap_vrgb_as_rgba(self): with closed_named_temp_file(suffix=".npy") as cmap_filename: cmap_data = _generate_cmap_test_data(None, "VRGB") np.save(cmap_filename, cmap_data) - cmap = create_colormap({'filename': cmap_filename, 'colormap_mode': "RGBA"}) + cmap = create_colormap({"filename": cmap_filename, "colormap_mode": "RGBA"}) assert cmap.colors.shape[0] == 4 assert cmap.colors.shape[1] == 4 # RGBA np.testing.assert_equal(cmap.colors[0], [128 / 255., 1.0, 0, 0]) @@ -365,15 +365,15 @@ def test_cmap_bad_mode(self, real_mode, forced_mode, filename_suffix): cmap_data = _generate_cmap_test_data(None, real_mode) _write_cmap_to_file(cmap_filename, cmap_data) # Force colormap_mode VRGBA to RGBA and we should see an exception - with pytest.raises(ValueError): - create_colormap({'filename': cmap_filename, 'colormap_mode': forced_mode}) + with pytest.raises(ValueError, match="Unexpected colormap shape for mode .*"): + create_colormap({"filename": cmap_filename, "colormap_mode": forced_mode}) def test_cmap_from_file_bad_shape(self): """Test that unknown array shape causes an error.""" from satpy.enhancements import create_colormap # create the colormap file on disk - with closed_named_temp_file(suffix='.npy') as cmap_filename: + with closed_named_temp_file(suffix=".npy") as cmap_filename: np.save(cmap_filename, np.array([ [0], [64], @@ -381,8 +381,8 @@ def test_cmap_from_file_bad_shape(self): [255], ])) - with pytest.raises(ValueError): - create_colormap({'filename': cmap_filename}) + with pytest.raises(ValueError, match="Unexpected colormap shape for mode 'None'"): + create_colormap({"filename": cmap_filename}) def test_cmap_from_config_path(self, tmp_path): """Test loading a colormap relative to a config path.""" @@ -396,7 +396,7 @@ def test_cmap_from_config_path(self, tmp_path): np.save(cmap_filename, cmap_data) with satpy.config.set(config_path=[tmp_path]): rel_cmap_filename = os.path.join("colormaps", "my_colormap.npy") - cmap = create_colormap({'filename': rel_cmap_filename, 'colormap_mode': "RGBA"}) + cmap = create_colormap({"filename": rel_cmap_filename, "colormap_mode": "RGBA"}) assert cmap.colors.shape[0] == 4 assert cmap.colors.shape[1] == 4 # RGBA np.testing.assert_equal(cmap.colors[0], [128 / 255., 1.0, 0, 0]) @@ -407,7 +407,7 @@ def test_cmap_from_config_path(self, tmp_path): def test_cmap_from_trollimage(self): """Test that colormaps in trollimage can be loaded.""" from satpy.enhancements import create_colormap - cmap = create_colormap({'colors': 'pubu'}) + cmap = create_colormap({"colors": "pubu"}) from trollimage.colormap import pubu np.testing.assert_equal(cmap.colors, pubu.colors) np.testing.assert_equal(cmap.values, pubu.values) @@ -415,7 +415,7 @@ def test_cmap_from_trollimage(self): def test_cmap_no_colormap(self): """Test that being unable to create a colormap raises an error.""" from satpy.enhancements import create_colormap - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unknown colormap format: .*"): create_colormap({}) def test_cmap_list(self): @@ -428,14 +428,14 @@ def test_cmap_list(self): [1, 1, 1], ] values = [2, 4, 6, 8] - cmap = create_colormap({'colors': colors, 'color_scale': 1}) + cmap = create_colormap({"colors": colors, "color_scale": 1}) assert cmap.colors.shape[0] == 4 np.testing.assert_equal(cmap.colors[0], 
[0.0, 0.0, 1.0]) assert cmap.values.shape[0] == 4 assert cmap.values[0] == 0 assert cmap.values[-1] == 1.0 - cmap = create_colormap({'colors': colors, 'color_scale': 1, 'values': values}) + cmap = create_colormap({"colors": colors, "color_scale": 1, "values": values}) assert cmap.colors.shape[0] == 4 np.testing.assert_equal(cmap.colors[0], [0.0, 0.0, 1.0]) assert cmap.values.shape[0] == 4 @@ -451,7 +451,7 @@ def func(array, index, gain=2): coords=array.coords, dims=array.dims, attrs=array.attrs) separate_func = on_separate_bands(func) - arr = xr.DataArray(np.zeros((3, 10, 10)), dims=['bands', 'y', 'x'], coords={"bands": ["R", "G", "B"]}) + arr = xr.DataArray(np.zeros((3, 10, 10)), dims=["bands", "y", "x"], coords={"bands": ["R", "G", "B"]}) assert separate_func(arr).shape == arr.shape assert all(separate_func(arr, gain=1).values[:, 0, 0] == [0, 1, 2]) @@ -460,11 +460,11 @@ def test_using_map_blocks(): """Test the `using_map_blocks` decorator.""" def func(np_array, block_info=None): - value = block_info[0]['chunk-location'][-1] + value = block_info[0]["chunk-location"][-1] return np.ones(np_array.shape) * value map_blocked_func = using_map_blocks(func) - arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=['bands', 'y', 'x']) + arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=["bands", "y", "x"]) res = map_blocked_func(arr) assert res.shape == arr.shape assert res[0, 0, 0].compute() != res[0, 9, 9].compute() @@ -479,12 +479,12 @@ def func(dask_array): return dask_array dask_func = on_dask_array(func) - arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=['bands', 'y', 'x']) + arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=["bands", "y", "x"]) res = dask_func(arr) assert res.shape == arr.shape -@pytest.fixture +@pytest.fixture() def fake_area(): """Return a fake 2×2 area.""" from pyresample.geometry import create_area_def @@ -492,88 +492,87 @@ def fake_area(): _nwcsaf_geo_props = { - 'cma_geo': ("geo", "cma", None, 'cma_pal', None, 'cloudmask', 'CMA', "uint8"), - 'cma_pps': ("pps", "cma", None, 'cma_pal', None, 'cloudmask', 'CMA', "uint8"), - 'cma_extended_pps': ("pps", "cma_extended", None, 'cma_extended_pal', None, - 'cloudmask_extended', 'CMA', "uint8"), - 'cmaprob_pps': ("pps", "cmaprob", None, 'cmaprob_pal', None, 'cloudmask_probability', - 'CMAPROB', "uint8"), - 'ct_geo': ("geo", "ct", None, 'ct_pal', None, 'cloudtype', 'CT', "uint8"), - 'ct_pps': ("pps", "ct", None, 'ct_pal', None, 'cloudtype', 'CT', "uint8"), - 'ctth_alti_geo': ("geo", "ctth_alti", None, 'ctth_alti_pal', None, 'cloud_top_height', - 'CTTH', "float64"), - 'ctth_alti_pps': ("pps", "ctth_alti", None, 'ctth_alti_pal', "ctth_status_flag", - 'cloud_top_height', 'CTTH', "float64"), - 'ctth_pres_geo': ("geo", "ctth_pres", None, 'ctth_pres_pal', None, 'cloud_top_pressure', - 'CTTH', "float64"), - 'ctth_pres_pps': ("pps", "ctth_pres", None, 'ctth_pres_pal', None, 'cloud_top_pressure', - 'CTTH', "float64"), - 'ctth_tempe_geo': ("geo", "ctth_tempe", None, 'ctth_tempe_pal', None, 'cloud_top_temperature', - 'CTTH', "float64"), - 'ctth_tempe_pps': ("pps", "ctth_tempe", None, 'ctth_tempe_pal', None, 'cloud_top_temperature', - 'CTTH', "float64"), - 'cmic_phase_geo': ("geo", "cmic_phase", None, 'cmic_phase_pal', None, 'cloud_top_phase', - 'CMIC', "uint8"), - 'cmic_phase_pps': ("pps", "cmic_phase", None, 'cmic_phase_pal', "cmic_status_flag", 'cloud_top_phase', - 'CMIC', "uint8"), - 'cmic_reff_geo': ("geo", "cmic_reff", None, 'cmic_reff_pal', None, 
'cloud_drop_effective_radius', - 'CMIC', "float64"), - 'cmic_reff_pps': ("pps", "cmic_reff", "cmic_cre", 'cmic_cre_pal', "cmic_status_flag", - 'cloud_drop_effective_radius', 'CMIC', "float64"), - 'cmic_cot_geo': ("geo", "cmic_cot", None, 'cmic_cot_pal', None, 'cloud_optical_thickness', - 'CMIC', "float64"), - 'cmic_cot_pps': ("pps", "cmic_cot", None, 'cmic_cot_pal', None, 'cloud_optical_thickness', - 'CMIC', "float64"), - 'cmic_cwp_pps': ("pps", "cmic_cwp", None, 'cmic_cwp_pal', None, 'cloud_water_path', - 'CMIC', "float64"), - 'cmic_lwp_geo': ("geo", "cmic_lwp", None, 'cmic_lwp_pal', None, 'cloud_liquid_water_path', - 'CMIC', "float64"), - 'cmic_lwp_pps': ("pps", "cmic_lwp", None, 'cmic_lwp_pal', None, 'liquid_water_path', - 'CMIC', "float64"), - 'cmic_iwp_geo': ("geo", "cmic_iwp", None, 'cmic_iwp_pal', None, 'cloud_ice_water_path', - 'CMIC', "float64"), - 'cmic_iwp_pps': ("pps", "cmic_iwp", None, 'cmic_iwp_pal', None, 'ice_water_path', - 'CMIC', "float64"), - 'pc': ("geo", "pc", None, 'pc_pal', None, 'precipitation_probability', 'PC', "uint8"), - 'crr': ("geo", "crr", None, 'crr_pal', None, 'convective_rain_rate', 'CRR', "uint8"), - 'crr_accum': ("geo", "crr_accum", None, 'crr_pal', None, - 'convective_precipitation_hourly_accumulation', 'CRR', "uint8"), - 'ishai_tpw': ("geo", "ishai_tpw", None, 'ishai_tpw_pal', None, 'total_precipitable_water', - 'iSHAI', "float64"), - 'ishai_shw': ("geo", "ishai_shw", None, 'ishai_shw_pal', None, 'showalter_index', - 'iSHAI', "float64"), - 'ishai_li': ("geo", "ishai_li", None, 'ishai_li_pal', None, 'lifted_index', - 'iSHAI', "float64"), - 'ci_prob30': ("geo", "ci_prob30", None, 'ci_pal', None, 'convection_initiation_prob30', - 'CI', "float64"), - 'ci_prob60': ("geo", "ci_prob60", None, 'ci_pal', None, 'convection_initiation_prob60', - 'CI', "float64"), - 'ci_prob90': ("geo", "ci_prob90", None, 'ci_pal', None, 'convection_initiation_prob90', - 'CI', "float64"), - 'asii_turb_trop_prob': ("geo", "asii_turb_trop_prob", None, 'asii_turb_prob_pal', None, - 'asii_prob', 'ASII-NG', "float64"), - 'MapCellCatType': ("geo", "MapCellCatType", None, 'MapCellCatType_pal', None, - 'rdt_cell_type', 'RDT-CW', "uint8"), + "cma_geo": ("geo", "cma", None, "cma_pal", None, "cloudmask", "CMA", "uint8"), + "cma_pps": ("pps", "cma", None, "cma_pal", None, "cloudmask", "CMA", "uint8"), + "cma_extended_pps": ("pps", "cma_extended", None, "cma_extended_pal", None, + "cloudmask_extended", "CMA", "uint8"), + "cmaprob_pps": ("pps", "cmaprob", None, "cmaprob_pal", None, "cloudmask_probability", + "CMAPROB", "uint8"), + "ct_geo": ("geo", "ct", None, "ct_pal", None, "cloudtype", "CT", "uint8"), + "ct_pps": ("pps", "ct", None, "ct_pal", None, "cloudtype", "CT", "uint8"), + "ctth_alti_geo": ("geo", "ctth_alti", None, "ctth_alti_pal", None, "cloud_top_height", + "CTTH", "float64"), + "ctth_alti_pps": ("pps", "ctth_alti", None, "ctth_alti_pal", "ctth_status_flag", + "cloud_top_height", "CTTH", "float64"), + "ctth_pres_geo": ("geo", "ctth_pres", None, "ctth_pres_pal", None, "cloud_top_pressure", + "CTTH", "float64"), + "ctth_pres_pps": ("pps", "ctth_pres", None, "ctth_pres_pal", None, "cloud_top_pressure", + "CTTH", "float64"), + "ctth_tempe_geo": ("geo", "ctth_tempe", None, "ctth_tempe_pal", None, "cloud_top_temperature", + "CTTH", "float64"), + "ctth_tempe_pps": ("pps", "ctth_tempe", None, "ctth_tempe_pal", None, "cloud_top_temperature", + "CTTH", "float64"), + "cmic_phase_geo": ("geo", "cmic_phase", None, "cmic_phase_pal", None, "cloud_top_phase", + "CMIC", "uint8"), + "cmic_phase_pps": 
("pps", "cmic_phase", None, "cmic_phase_pal", "cmic_status_flag", "cloud_top_phase", + "CMIC", "uint8"), + "cmic_reff_geo": ("geo", "cmic_reff", None, "cmic_reff_pal", None, "cloud_drop_effective_radius", + "CMIC", "float64"), + "cmic_reff_pps": ("pps", "cmic_reff", "cmic_cre", "cmic_cre_pal", "cmic_status_flag", + "cloud_drop_effective_radius", "CMIC", "float64"), + "cmic_cot_geo": ("geo", "cmic_cot", None, "cmic_cot_pal", None, "cloud_optical_thickness", + "CMIC", "float64"), + "cmic_cot_pps": ("pps", "cmic_cot", None, "cmic_cot_pal", None, "cloud_optical_thickness", + "CMIC", "float64"), + "cmic_cwp_pps": ("pps", "cmic_cwp", None, "cmic_cwp_pal", None, "cloud_water_path", + "CMIC", "float64"), + "cmic_lwp_geo": ("geo", "cmic_lwp", None, "cmic_lwp_pal", None, "cloud_liquid_water_path", + "CMIC", "float64"), + "cmic_lwp_pps": ("pps", "cmic_lwp", None, "cmic_lwp_pal", None, "liquid_water_path", + "CMIC", "float64"), + "cmic_iwp_geo": ("geo", "cmic_iwp", None, "cmic_iwp_pal", None, "cloud_ice_water_path", + "CMIC", "float64"), + "cmic_iwp_pps": ("pps", "cmic_iwp", None, "cmic_iwp_pal", None, "ice_water_path", + "CMIC", "float64"), + "pc": ("geo", "pc", None, "pc_pal", None, "precipitation_probability", "PC", "uint8"), + "crr": ("geo", "crr", None, "crr_pal", None, "convective_rain_rate", "CRR", "uint8"), + "crr_accum": ("geo", "crr_accum", None, "crr_pal", None, + "convective_precipitation_hourly_accumulation", "CRR", "uint8"), + "ishai_tpw": ("geo", "ishai_tpw", None, "ishai_tpw_pal", None, "total_precipitable_water", + "iSHAI", "float64"), + "ishai_shw": ("geo", "ishai_shw", None, "ishai_shw_pal", None, "showalter_index", + "iSHAI", "float64"), + "ishai_li": ("geo", "ishai_li", None, "ishai_li_pal", None, "lifted_index", + "iSHAI", "float64"), + "ci_prob30": ("geo", "ci_prob30", None, "ci_pal", None, "convection_initiation_prob30", + "CI", "float64"), + "ci_prob60": ("geo", "ci_prob60", None, "ci_pal", None, "convection_initiation_prob60", + "CI", "float64"), + "ci_prob90": ("geo", "ci_prob90", None, "ci_pal", None, "convection_initiation_prob90", + "CI", "float64"), + "asii_turb_trop_prob": ("geo", "asii_turb_trop_prob", None, "asii_turb_prob_pal", None, + "asii_prob", "ASII-NG", "float64"), + "MapCellCatType": ("geo", "MapCellCatType", None, "MapCellCatType_pal", None, + "rdt_cell_type", "RDT-CW", "uint8"), } @pytest.mark.parametrize( "data", - ['cma_geo', 'cma_pps', 'cma_extended_pps', 'cmaprob_pps', 'ct_geo', - 'ct_pps', 'ctth_alti_geo', 'ctth_alti_pps', 'ctth_pres_geo', - 'ctth_pres_pps', 'ctth_tempe_geo', 'ctth_tempe_pps', - 'cmic_phase_geo', 'cmic_phase_pps', 'cmic_reff_geo', - 'cmic_reff_pps', 'cmic_cot_geo', 'cmic_cot_pps', 'cmic_cwp_pps', - 'cmic_lwp_geo', 'cmic_lwp_pps', 'cmic_iwp_geo', 'cmic_iwp_pps', - 'pc', 'crr', 'crr_accum', 'ishai_tpw', 'ishai_shw', 'ishai_li', - 'ci_prob30', 'ci_prob60', 'ci_prob90', 'asii_turb_trop_prob', - 'MapCellCatType'] + ["cma_geo", "cma_pps", "cma_extended_pps", "cmaprob_pps", "ct_geo", + "ct_pps", "ctth_alti_geo", "ctth_alti_pps", "ctth_pres_geo", + "ctth_pres_pps", "ctth_tempe_geo", "ctth_tempe_pps", + "cmic_phase_geo", "cmic_phase_pps", "cmic_reff_geo", + "cmic_reff_pps", "cmic_cot_geo", "cmic_cot_pps", "cmic_cwp_pps", + "cmic_lwp_geo", "cmic_lwp_pps", "cmic_iwp_geo", "cmic_iwp_pps", + "pc", "crr", "crr_accum", "ishai_tpw", "ishai_shw", "ishai_li", + "ci_prob30", "ci_prob60", "ci_prob90", "asii_turb_trop_prob", + "MapCellCatType"] ) def test_nwcsaf_comps(fake_area, tmp_path, data): """Test loading NWCSAF composites.""" + from satpy import 
Scene from satpy.writers import get_enhanced_image - - from ... import Scene (flavour, dvname, altname, palettename, statusname, comp, filelabel, dtp) = _nwcsaf_geo_props[data] rng = (0, 100) if dtp == "uint8" else (-100, 1000) if flavour == "geo": @@ -645,9 +644,9 @@ def setup_method(self): """Create test data.""" data = da.arange(-100, 1000, 110).reshape(2, 5) rgb_data = np.stack([data, data, data]) - self.rgb = xr.DataArray(rgb_data, dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}, - attrs={'platform_name': 'Himawari-8'}) + self.rgb = xr.DataArray(rgb_data, dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}, + attrs={"platform_name": "Himawari-8"}) def test_jma_true_color_reproduction(self): """Test the jma_true_color_reproduction enhancement.""" @@ -669,12 +668,12 @@ def test_jma_true_color_reproduction(self): np.testing.assert_almost_equal(img.data.compute(), expected) - self.rgb.attrs['platform_name'] = None + self.rgb.attrs["platform_name"] = None img = XRImage(self.rgb) with pytest.raises(ValueError, match="Missing platform name."): jma_true_color_reproduction(img) - self.rgb.attrs['platform_name'] = 'Fakesat' + self.rgb.attrs["platform_name"] = "Fakesat" img = XRImage(self.rgb) with pytest.raises(KeyError, match="No conversion matrix found for platform Fakesat"): jma_true_color_reproduction(img) diff --git a/satpy/tests/enhancement_tests/test_viirs.py b/satpy/tests/enhancement_tests/test_viirs.py index 5595266034..b73e5fc700 100644 --- a/satpy/tests/enhancement_tests/test_viirs.py +++ b/satpy/tests/enhancement_tests/test_viirs.py @@ -33,8 +33,8 @@ def setUp(self): """Create test data.""" data = np.arange(15, 301, 15).reshape(2, 10) data = da.from_array(data, chunks=(2, 10)) - self.da = xr.DataArray(data, dims=('y', 'x'), attrs={'test': 'test'}) - self.palette = {'colors': + self.da = xr.DataArray(data, dims=("y", "x"), attrs={"test": "test"}) + self.palette = {"colors": [[14, [0.0, 0.0, 0.0]], [15, [0.0, 0.0, 0.39215]], [16, [0.76862, 0.63529, 0.44705]], @@ -64,8 +64,8 @@ def setUp(self): [191, [1.0, 0.0, 0.0]], [200, [1.0, 0.0, 0.0]], [201, [0.0, 0.0, 0.0]]], - 'min_value': 0, - 'max_value': 201} + "min_value": 0, + "max_value": 201} def test_viirs(self): """Test VIIRS flood enhancement.""" diff --git a/satpy/tests/features/steps/steps-load.py b/satpy/tests/features/steps/steps-load.py index 9dfe9eb9cc..7e2d1829a2 100644 --- a/satpy/tests/features/steps/steps-load.py +++ b/satpy/tests/features/steps/steps-load.py @@ -25,31 +25,31 @@ use_step_matcher("re") -@given(u'data is available') +@given(u"data is available") def step_impl_data_available(context): """Make data available.""" - if not os.path.exists('/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5'): - response = urlopen('https://zenodo.org/record/16355/files/' - 'SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5') - with open('/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5', + if not os.path.exists("/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5"): + response = urlopen("https://zenodo.org/record/16355/files/" + "SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5") + with open("/tmp/SVM02_npp_d20150311_t1122204_e1123446_b17451_c20150311113206961730_cspp_dev.h5", mode="w") as fp: fp.write(response.read()) - if not os.path.exists('/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5'): - response = 
urlopen('https://zenodo.org/record/16355/files/' - 'GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5') - with open('/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5', + if not os.path.exists("/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5"): + response = urlopen("https://zenodo.org/record/16355/files/" + "GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5") + with open("/tmp/GMTCO_npp_d20150311_t1122204_e1123446_b17451_c20150311113205873710_cspp_dev.h5", mode="w") as fp: fp.write(response.read()) -@when(u'user loads the data without providing a config file') +@when(u"user loads the data without providing a config file") def step_impl_user_loads_no_config(context): """Load the data without a config.""" from datetime import datetime from satpy import Scene, find_files_and_readers os.chdir("/tmp/") - readers_files = find_files_and_readers(sensor='viirs', + readers_files = find_files_and_readers(sensor="viirs", start_time=datetime(2015, 3, 11, 11, 20), end_time=datetime(2015, 3, 11, 11, 26)) scn = Scene(filenames=readers_files) @@ -57,20 +57,20 @@ def step_impl_user_loads_no_config(context): context.scene = scn -@then(u'the data is available in a scene object') +@then(u"the data is available in a scene object") def step_impl_data_available_in_scene(context): """Check that the data is available in the scene.""" assert context.scene["M02"] is not None assert context.scene.get("M01") is None -@when(u'some items are not available') +@when(u"some items are not available") def step_impl_items_not_available(context): """Load some data.""" context.scene.load(["M01"]) -@when(u'user wants to know what data is available') +@when(u"user wants to know what data is available") def step_impl_user_checks_availability(context): """Check availability.""" from datetime import datetime @@ -84,7 +84,7 @@ def step_impl_user_checks_availability(context): context.available_dataset_ids = scn.available_dataset_ids() -@then(u'available datasets are returned') +@then(u"available datasets are returned") def step_impl_available_datasets_are_returned(context): """Check that datasets are returned.""" assert (len(context.available_dataset_ids) >= 5) @@ -98,13 +98,13 @@ def step_impl_datasets_with_same_name(context): from satpy import Scene from satpy.tests.utils import make_dataid scn = Scene() - scn[make_dataid(name='ds1', calibration='radiance')] = DataArray([[1, 2], [3, 4]]) - scn[make_dataid(name='ds1', resolution=500, calibration='reflectance')] = DataArray([[5, 6], [7, 8]]) - scn[make_dataid(name='ds1', resolution=250, calibration='reflectance')] = DataArray([[5, 6], [7, 8]]) - scn[make_dataid(name='ds1', resolution=1000, calibration='reflectance')] = DataArray([[5, 6], [7, 8]]) - scn[make_dataid(name='ds1', resolution=500, calibration='radiance', modifiers=('mod1',))] = \ + scn[make_dataid(name="ds1", calibration="radiance")] = DataArray([[1, 2], [3, 4]]) + scn[make_dataid(name="ds1", resolution=500, calibration="reflectance")] = DataArray([[5, 6], [7, 8]]) + scn[make_dataid(name="ds1", resolution=250, calibration="reflectance")] = DataArray([[5, 6], [7, 8]]) + scn[make_dataid(name="ds1", resolution=1000, calibration="reflectance")] = DataArray([[5, 6], [7, 8]]) + scn[make_dataid(name="ds1", resolution=500, calibration="radiance", modifiers=("mod1",))] = \ DataArray([[5, 6], [7, 8]]) - ds_id = make_dataid(name='ds1', resolution=1000, calibration='radiance', modifiers=('mod1', 'mod2')) + 
ds_id = make_dataid(name="ds1", resolution=1000, calibration="radiance", modifiers=("mod1", "mod2")) scn[ds_id] = DataArray([[5, 6], [7, 8]]) context.scene = scn @@ -112,10 +112,10 @@ def step_impl_datasets_with_same_name(context): @when("a dataset is retrieved by name") def step_impl_dataset_retrieved_by_name(context): """Use the Scene's getitem method to get a dataset.""" - context.returned_dataset = context.scene['ds1'] + context.returned_dataset = context.scene["ds1"] @then("the least modified version of the dataset is returned") def step_impl_least_modified_dataset_returned(context): """Check that the dataset should be one of the least modified datasets.""" - assert len(context.returned_dataset.attrs['modifiers']) == 0 + assert len(context.returned_dataset.attrs["modifiers"]) == 0 diff --git a/satpy/tests/features/steps/steps-real-load-process-write.py b/satpy/tests/features/steps/steps-real-load-process-write.py index d719d397e4..d99b167b97 100644 --- a/satpy/tests/features/steps/steps-real-load-process-write.py +++ b/satpy/tests/features/steps/steps-real-load-process-write.py @@ -80,10 +80,10 @@ def before_all(context): debug_on() -@given(u'{dformat} data is available') +@given(u"{dformat} data is available") def step_impl_input_files_exists(context, dformat): """Check that input data exists on disk.""" - data_path = os.path.join('test_data', dformat) + data_path = os.path.join("test_data", dformat) data_available = os.path.exists(data_path) if not data_available: context.scenario.skip(reason="No test data available for " + dformat) @@ -92,40 +92,40 @@ def step_impl_input_files_exists(context, dformat): context.data_path = data_path -@when(u'the user loads the {composite} composite') +@when(u"the user loads the {composite} composite") def step_impl_create_scene_and_load_single(context, composite): """Create a Scene and load a single composite.""" from satpy import Scene scn = Scene(reader=context.dformat, - filenames=get_all_files(os.path.join(context.data_path, 'data'), - '*')) + filenames=get_all_files(os.path.join(context.data_path, "data"), + "*")) scn.load([composite]) context.scn = scn context.composite = composite -@when(u'the user resamples the data to {area}') +@when(u"the user resamples the data to {area}") def step_impl_resample_scene(context, area): """Resample the scene to an area or use the native resampler.""" - if area != '-': + if area != "-": context.lscn = context.scn.resample(area) else: - context.lscn = context.scn.resample(resampler='native') + context.lscn = context.scn.resample(resampler="native") context.area = area -@when(u'the user saves the composite to disk') +@when(u"the user saves the composite to disk") def step_impl_save_to_png(context): """Call Scene.save_dataset to write a PNG image.""" - with NamedTemporaryFile(suffix='.png', delete=False) as tmp_file: + with NamedTemporaryFile(suffix=".png", delete=False) as tmp_file: context.lscn.save_dataset(context.composite, filename=tmp_file.name) context.new_filename = tmp_file.name -@then(u'the resulting image should match the reference image') +@then(u"the resulting image should match the reference image") def step_impl_compare_two_png_images(context): """Compare two PNG image files.""" - if context.area == '-': + if context.area == "-": ref_filename = context.composite + ".png" else: ref_filename = context.composite + "_" + context.area + ".png" diff --git a/satpy/tests/features/steps/steps-save.py b/satpy/tests/features/steps/steps-save.py index b42d8751a2..dbfd8040c0 100644 --- 
a/satpy/tests/features/steps/steps-save.py +++ b/satpy/tests/features/steps/steps-save.py @@ -36,7 +36,7 @@ def step_impl_create_scene_one_dataset(context): from satpy import Scene scn = Scene() - scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=['y', 'x']) + scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=["y", "x"]) context.scene = scn @@ -48,7 +48,7 @@ def step_impl_scene_show(context): context (behave.runner.Context): Test context """ - with patch('trollimage.xrimage.XRImage.show') as mock_show: + with patch("trollimage.xrimage.XRImage.show") as mock_show: context.scene.show("MyDataset") mock_show.assert_called_once_with() @@ -100,8 +100,8 @@ def step_impl_create_scene_two_datasets(context): from satpy import Scene scn = Scene() - scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=['y', 'x']) - scn["MyDataset2"] = DataArray([[5, 6], [7, 8]], dims=['y', 'x']) + scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=["y", "x"]) + scn["MyDataset2"] = DataArray([[5, 6], [7, 8]], dims=["y", "x"]) context.scene = scn diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py index cd5082a5b7..2ebebacbc7 100644 --- a/satpy/tests/modifier_tests/test_angles.py +++ b/satpy/tests/modifier_tests/test_angles.py @@ -79,9 +79,9 @@ def _get_angle_test_data(area_def: Optional[Union[AreaDefinition, StackedAreaDef vis = xr.DataArray(data, dims=dims, attrs={ - 'area': area_def, - 'start_time': stime, - 'orbital_parameters': orb_params, + "area": area_def, + "start_time": stime, + "orbital_parameters": orb_params, }) return vis @@ -318,7 +318,7 @@ def test_cached_result_numpy_fails(self, tmp_path): def _fake_func(shape, chunks): return np.zeros(shape) - with pytest.raises(ValueError), \ + with pytest.raises(ValueError, match="Zarr caching currently only supports dask arrays. 
Got .*"), \ satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): _fake_func((5, 5), ((5,), (5,))) diff --git a/satpy/tests/modifier_tests/test_crefl.py b/satpy/tests/modifier_tests/test_crefl.py index c842df701e..dc9f4a232a 100644 --- a/satpy/tests/modifier_tests/test_crefl.py +++ b/satpy/tests/modifier_tests/test_crefl.py @@ -23,7 +23,7 @@ from dask import array as da from pyresample.geometry import AreaDefinition -from ..utils import assert_maximum_dask_computes +from satpy.tests.utils import assert_maximum_dask_computes # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -57,7 +57,7 @@ def _mock_and_create_dem_file(tmpdir, url, var_name, fill_value=None): def _mock_dem_retrieve(tmpdir, url): - rmock_obj = mock.patch('satpy.modifiers._crefl.retrieve') + rmock_obj = mock.patch("satpy.modifiers._crefl.retrieve") rmock = rmock_obj.start() dem_fn = str(tmpdir.join(url)) rmock.return_value = dem_fn @@ -74,17 +74,17 @@ def _create_fake_dem_file(dem_fn, var_name, fill_value): h.end() -def _make_viirs_xarray(data, area, name, standard_name, wavelength=None, units='degrees', calibration=None): - return xr.DataArray(data, dims=('y', 'x'), +def _make_viirs_xarray(data, area, name, standard_name, wavelength=None, units="degrees", calibration=None): + return xr.DataArray(data, dims=("y", "x"), attrs={ - 'start_orbit': 1708, 'end_orbit': 1708, 'wavelength': wavelength, - 'modifiers': None, 'calibration': calibration, - 'resolution': 371, 'name': name, - 'standard_name': standard_name, 'platform_name': 'Suomi-NPP', - 'polarization': None, 'sensor': 'viirs', 'units': units, - 'start_time': datetime(2012, 2, 25, 18, 1, 24, 570942), - 'end_time': datetime(2012, 2, 25, 18, 11, 21, 175760), 'area': area, - 'ancillary_variables': [] + "start_orbit": 1708, "end_orbit": 1708, "wavelength": wavelength, + "modifiers": None, "calibration": calibration, + "resolution": 371, "name": name, + "standard_name": standard_name, "platform_name": "Suomi-NPP", + "polarization": None, "sensor": "viirs", "units": units, + "start_time": datetime(2012, 2, 25, 18, 1, 24, 570942), + "end_time": datetime(2012, 2, 25, 18, 11, 21, 175760), "area": area, + "ancillary_variables": [] }) @@ -97,9 +97,9 @@ def data_area_ref_corrector(): rows = 3 cols = 5 area = AreaDefinition( - 'some_area_name', 'On-the-fly area', 'geosabii', - {'a': '6378137.0', 'b': '6356752.31414', 'h': '35786023.0', 'lon_0': '-89.5', 'proj': 'geos', 'sweep': 'x', - 'units': 'm'}, + "some_area_name", "On-the-fly area", "geosabii", + {"a": "6378137.0", "b": "6356752.31414", "h": "35786023.0", "lon_0": "-89.5", "proj": "geos", "sweep": "x", + "units": "m"}, cols, rows, (-5434894.954752679, -5434894.964451744, 5434894.964451744, 5434894.954752679)) @@ -135,39 +135,39 @@ def test_reflectance_corrector_abi(self, name, wavelength, resolution, exp_mean, from satpy.modifiers._crefl import ReflectanceCorrector from satpy.tests.utils import make_dsq ref_cor = ReflectanceCorrector(optional_prerequisites=[ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle')], name=name, prerequisites=[], - wavelength=wavelength, resolution=resolution, calibration='reflectance', - modifiers=('sunz_corrected', 'rayleigh_corrected_crefl',), sensor='abi') - - assert ref_cor.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl') - assert ref_cor.attrs['calibration'] == 'reflectance' - assert 
ref_cor.attrs['wavelength'] == wavelength - assert ref_cor.attrs['name'] == name - assert ref_cor.attrs['resolution'] == resolution - assert ref_cor.attrs['sensor'] == 'abi' - assert ref_cor.attrs['prerequisites'] == [] - assert ref_cor.attrs['optional_prerequisites'] == [ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle')] + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle")], name=name, prerequisites=[], + wavelength=wavelength, resolution=resolution, calibration="reflectance", + modifiers=("sunz_corrected", "rayleigh_corrected_crefl",), sensor="abi") + + assert ref_cor.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl") + assert ref_cor.attrs["calibration"] == "reflectance" + assert ref_cor.attrs["wavelength"] == wavelength + assert ref_cor.attrs["name"] == name + assert ref_cor.attrs["resolution"] == resolution + assert ref_cor.attrs["sensor"] == "abi" + assert ref_cor.attrs["prerequisites"] == [] + assert ref_cor.attrs["optional_prerequisites"] == [ + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle")] area, dnb = self.data_area_ref_corrector() c01 = xr.DataArray(dnb, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'platform_name': 'GOES-16', - 'calibration': 'reflectance', 'units': '%', 'wavelength': wavelength, - 'name': name, 'resolution': resolution, 'sensor': 'abi', - 'start_time': '2017-09-20 17:30:40.800000', 'end_time': '2017-09-20 17:41:17.500000', - 'area': area, 'ancillary_variables': [], - 'orbital_parameters': { - 'satellite_nominal_longitude': -89.5, - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_altitude': 35786023.4375, + "platform_name": "GOES-16", + "calibration": "reflectance", "units": "%", "wavelength": wavelength, + "name": name, "resolution": resolution, "sensor": "abi", + "start_time": "2017-09-20 17:30:40.800000", "end_time": "2017-09-20 17:41:17.500000", + "area": area, "ancillary_variables": [], + "orbital_parameters": { + "satellite_nominal_longitude": -89.5, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_altitude": 35786023.4375, }, }) with assert_maximum_dask_computes(0): @@ -175,18 +175,18 @@ def test_reflectance_corrector_abi(self, name, wavelength, resolution, exp_mean, assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl') - assert res.attrs['platform_name'] == 'GOES-16' - assert res.attrs['calibration'] == 'reflectance' - assert res.attrs['units'] == '%' - assert res.attrs['wavelength'] == wavelength - assert res.attrs['name'] == name - assert res.attrs['resolution'] == resolution - assert res.attrs['sensor'] == 'abi' - assert res.attrs['start_time'] == '2017-09-20 17:30:40.800000' - assert res.attrs['end_time'] == '2017-09-20 17:41:17.500000' - assert res.attrs['area'] == area - assert res.attrs['ancillary_variables'] == [] + assert res.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl") + assert res.attrs["platform_name"] == "GOES-16" + assert res.attrs["calibration"] == "reflectance" + assert res.attrs["units"] == "%" + assert res.attrs["wavelength"] == wavelength + assert res.attrs["name"] == name + assert res.attrs["resolution"] == resolution + assert 
res.attrs["sensor"] == "abi" + assert res.attrs["start_time"] == "2017-09-20 17:30:40.800000" + assert res.attrs["end_time"] == "2017-09-20 17:41:17.500000" + assert res.attrs["area"] == area + assert res.attrs["ancillary_variables"] == [] data = res.values unique = np.unique(data[~np.isnan(data)]) np.testing.assert_allclose(np.nanmean(data), exp_mean, rtol=1e-5) @@ -194,7 +194,7 @@ def test_reflectance_corrector_abi(self, name, wavelength, resolution, exp_mean, np.testing.assert_allclose(unique, exp_unique, rtol=1e-5) @pytest.mark.parametrize( - 'url,dem_mock_cm,dem_sds', + ("url", "dem_mock_cm", "dem_sds"), [ (None, mock_cmgdem, "average elevation"), ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"), @@ -207,62 +207,62 @@ def test_reflectance_corrector_viirs(self, tmpdir, url, dem_mock_cm, dem_sds): ref_cor = ReflectanceCorrector( optional_prerequisites=[ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle') + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle") ], - name='I01', + name="I01", prerequisites=[], wavelength=(0.6, 0.64, 0.68), resolution=371, - calibration='reflectance', - modifiers=('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband'), - sensor='viirs', + calibration="reflectance", + modifiers=("sunz_corrected_iband", "rayleigh_corrected_crefl_iband"), + sensor="viirs", url=url, dem_sds=dem_sds, ) - assert ref_cor.attrs['modifiers'] == ('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband') - assert ref_cor.attrs['calibration'] == 'reflectance' - assert ref_cor.attrs['wavelength'] == (0.6, 0.64, 0.68) - assert ref_cor.attrs['name'] == 'I01' - assert ref_cor.attrs['resolution'] == 371 - assert ref_cor.attrs['sensor'] == 'viirs' - assert ref_cor.attrs['prerequisites'] == [] - assert ref_cor.attrs['optional_prerequisites'] == [ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle')] + assert ref_cor.attrs["modifiers"] == ("sunz_corrected_iband", "rayleigh_corrected_crefl_iband") + assert ref_cor.attrs["calibration"] == "reflectance" + assert ref_cor.attrs["wavelength"] == (0.6, 0.64, 0.68) + assert ref_cor.attrs["name"] == "I01" + assert ref_cor.attrs["resolution"] == 371 + assert ref_cor.attrs["sensor"] == "viirs" + assert ref_cor.attrs["prerequisites"] == [] + assert ref_cor.attrs["optional_prerequisites"] == [ + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle")] area, data = self.data_area_ref_corrector() - c01 = _make_viirs_xarray(data, area, 'I01', 'toa_bidirectional_reflectance', - wavelength=(0.6, 0.64, 0.68), units='%', - calibration='reflectance') - c02 = _make_viirs_xarray(data, area, 'satellite_azimuth_angle', 'sensor_azimuth_angle') - c03 = _make_viirs_xarray(data, area, 'satellite_zenith_angle', 'sensor_zenith_angle') - c04 = _make_viirs_xarray(data, area, 'solar_azimuth_angle', 'solar_azimuth_angle') - c05 = _make_viirs_xarray(data, area, 'solar_zenith_angle', 'solar_zenith_angle') + c01 = _make_viirs_xarray(data, area, "I01", "toa_bidirectional_reflectance", + wavelength=(0.6, 0.64, 0.68), units="%", + calibration="reflectance") + c02 = _make_viirs_xarray(data, area, "satellite_azimuth_angle", "sensor_azimuth_angle") + 
c03 = _make_viirs_xarray(data, area, "satellite_zenith_angle", "sensor_zenith_angle") + c04 = _make_viirs_xarray(data, area, "solar_azimuth_angle", "solar_azimuth_angle") + c05 = _make_viirs_xarray(data, area, "solar_zenith_angle", "solar_zenith_angle") with dem_mock_cm(tmpdir, url), assert_maximum_dask_computes(0): res = ref_cor([c01], [c02, c03, c04, c05]) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['wavelength'] == (0.6, 0.64, 0.68) - assert res.attrs['modifiers'] == ('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband') - assert res.attrs['calibration'] == 'reflectance' - assert res.attrs['resolution'] == 371 - assert res.attrs['name'] == 'I01' - assert res.attrs['standard_name'] == 'toa_bidirectional_reflectance' - assert res.attrs['platform_name'] == 'Suomi-NPP' - assert res.attrs['sensor'] == 'viirs' - assert res.attrs['units'] == '%' - assert res.attrs['start_time'] == datetime(2012, 2, 25, 18, 1, 24, 570942) - assert res.attrs['end_time'] == datetime(2012, 2, 25, 18, 11, 21, 175760) - assert res.attrs['area'] == area - assert res.attrs['ancillary_variables'] == [] + assert res.attrs["wavelength"] == (0.6, 0.64, 0.68) + assert res.attrs["modifiers"] == ("sunz_corrected_iband", "rayleigh_corrected_crefl_iband") + assert res.attrs["calibration"] == "reflectance" + assert res.attrs["resolution"] == 371 + assert res.attrs["name"] == "I01" + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" + assert res.attrs["platform_name"] == "Suomi-NPP" + assert res.attrs["sensor"] == "viirs" + assert res.attrs["units"] == "%" + assert res.attrs["start_time"] == datetime(2012, 2, 25, 18, 1, 24, 570942) + assert res.attrs["end_time"] == datetime(2012, 2, 25, 18, 11, 21, 175760) + assert res.attrs["area"] == area + assert res.attrs["ancillary_variables"] == [] data = res.values assert abs(np.mean(data) - 51.12750267805715) < 1e-6 assert data.shape == (3, 5) @@ -273,64 +273,64 @@ def test_reflectance_corrector_modis(self): """Test ReflectanceCorrector modifier with MODIS data.""" from satpy.modifiers._crefl import ReflectanceCorrector from satpy.tests.utils import make_dsq - sataa_did = make_dsq(name='satellite_azimuth_angle') - satza_did = make_dsq(name='satellite_zenith_angle') - solaa_did = make_dsq(name='solar_azimuth_angle') - solza_did = make_dsq(name='solar_zenith_angle') + sataa_did = make_dsq(name="satellite_azimuth_angle") + satza_did = make_dsq(name="satellite_zenith_angle") + solaa_did = make_dsq(name="solar_azimuth_angle") + solza_did = make_dsq(name="solar_zenith_angle") ref_cor = ReflectanceCorrector( - optional_prerequisites=[sataa_did, satza_did, solaa_did, solza_did], name='1', - prerequisites=[], wavelength=(0.62, 0.645, 0.67), resolution=250, calibration='reflectance', - modifiers=('sunz_corrected', 'rayleigh_corrected_crefl'), sensor='modis') - assert ref_cor.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl') - assert ref_cor.attrs['calibration'] == 'reflectance' - assert ref_cor.attrs['wavelength'] == (0.62, 0.645, 0.67) - assert ref_cor.attrs['name'] == '1' - assert ref_cor.attrs['resolution'] == 250 - assert ref_cor.attrs['sensor'] == 'modis' - assert ref_cor.attrs['prerequisites'] == [] - assert ref_cor.attrs['optional_prerequisites'] == [ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle')] + optional_prerequisites=[sataa_did, satza_did, solaa_did, solza_did], name="1", + 
prerequisites=[], wavelength=(0.62, 0.645, 0.67), resolution=250, calibration="reflectance", + modifiers=("sunz_corrected", "rayleigh_corrected_crefl"), sensor="modis") + assert ref_cor.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl") + assert ref_cor.attrs["calibration"] == "reflectance" + assert ref_cor.attrs["wavelength"] == (0.62, 0.645, 0.67) + assert ref_cor.attrs["name"] == "1" + assert ref_cor.attrs["resolution"] == 250 + assert ref_cor.attrs["sensor"] == "modis" + assert ref_cor.attrs["prerequisites"] == [] + assert ref_cor.attrs["optional_prerequisites"] == [ + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle")] area, dnb = self.data_area_ref_corrector() def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1000): return xr.DataArray(dnb, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'wavelength': wavelength, 'level': None, 'modifiers': modifiers, - 'calibration': calibration, 'resolution': resolution, - 'name': name, 'coordinates': ['longitude', 'latitude'], - 'platform_name': 'EOS-Aqua', 'polarization': None, 'sensor': 'modis', - 'units': '%', 'start_time': datetime(2012, 8, 13, 18, 46, 1, 439838), - 'end_time': datetime(2012, 8, 13, 18, 57, 47, 746296), 'area': area, - 'ancillary_variables': [] + "wavelength": wavelength, "level": None, "modifiers": modifiers, + "calibration": calibration, "resolution": resolution, + "name": name, "coordinates": ["longitude", "latitude"], + "platform_name": "EOS-Aqua", "polarization": None, "sensor": "modis", + "units": "%", "start_time": datetime(2012, 8, 13, 18, 46, 1, 439838), + "end_time": datetime(2012, 8, 13, 18, 57, 47, 746296), "area": area, + "ancillary_variables": [] }) - c01 = make_xarray('1', 'reflectance', wavelength=(0.62, 0.645, 0.67), modifiers='sunz_corrected', + c01 = make_xarray("1", "reflectance", wavelength=(0.62, 0.645, 0.67), modifiers="sunz_corrected", resolution=500) - c02 = make_xarray('satellite_azimuth_angle', None) - c03 = make_xarray('satellite_zenith_angle', None) - c04 = make_xarray('solar_azimuth_angle', None) - c05 = make_xarray('solar_zenith_angle', None) + c02 = make_xarray("satellite_azimuth_angle", None) + c03 = make_xarray("satellite_zenith_angle", None) + c04 = make_xarray("solar_azimuth_angle", None) + c05 = make_xarray("solar_zenith_angle", None) res = ref_cor([c01], [c02, c03, c04, c05]) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['wavelength'] == (0.62, 0.645, 0.67) - assert res.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl',) - assert res.attrs['calibration'] == 'reflectance' - assert res.attrs['resolution'] == 500 - assert res.attrs['name'] == '1' - assert res.attrs['platform_name'] == 'EOS-Aqua' - assert res.attrs['sensor'] == 'modis' - assert res.attrs['units'] == '%' - assert res.attrs['start_time'] == datetime(2012, 8, 13, 18, 46, 1, 439838) - assert res.attrs['end_time'] == datetime(2012, 8, 13, 18, 57, 47, 746296) - assert res.attrs['area'] == area - assert res.attrs['ancillary_variables'] == [] + assert res.attrs["wavelength"] == (0.62, 0.645, 0.67) + assert res.attrs["modifiers"] == ("sunz_corrected", "rayleigh_corrected_crefl",) + assert res.attrs["calibration"] == "reflectance" + assert res.attrs["resolution"] == 500 + assert res.attrs["name"] == "1" + assert res.attrs["platform_name"] == "EOS-Aqua" + assert res.attrs["sensor"] == "modis" + assert 
res.attrs["units"] == "%" + assert res.attrs["start_time"] == datetime(2012, 8, 13, 18, 46, 1, 439838) + assert res.attrs["end_time"] == datetime(2012, 8, 13, 18, 57, 47, 746296) + assert res.attrs["area"] == area + assert res.attrs["ancillary_variables"] == [] data = res.values assert abs(np.mean(data) - 52.09372623964498) < 1e-6 assert data.shape == (3, 5) @@ -341,12 +341,12 @@ def test_reflectance_corrector_bad_prereqs(self): """Test ReflectanceCorrector modifier with wrong number of inputs.""" from satpy.modifiers._crefl import ReflectanceCorrector ref_cor = ReflectanceCorrector("test") - pytest.raises(ValueError, ref_cor, [1], [2, 3, 4]) - pytest.raises(ValueError, ref_cor, [1, 2, 3, 4], []) - pytest.raises(ValueError, ref_cor, [], [1, 2, 3, 4]) + pytest.raises(ValueError, ref_cor, [1], [2, 3, 4], match="Not sure how to handle provided dependencies..*") + pytest.raises(ValueError, ref_cor, [1, 2, 3, 4], [], match="Not sure how to handle provided dependencies..*") + pytest.raises(ValueError, ref_cor, [], [1, 2, 3, 4], match="Not sure how to handle provided dependencies..*") @pytest.mark.parametrize( - 'url,dem_mock_cm,dem_sds', + ("url", "dem_mock_cm", "dem_sds"), [ (None, mock_cmgdem, "average elevation"), ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"), @@ -364,31 +364,31 @@ def test_reflectance_corrector_different_chunks(self, tmpdir, url, dem_mock_cm, ref_cor = ReflectanceCorrector( optional_prerequisites=[ - make_dsq(name='satellite_azimuth_angle'), - make_dsq(name='satellite_zenith_angle'), - make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle') + make_dsq(name="satellite_azimuth_angle"), + make_dsq(name="satellite_zenith_angle"), + make_dsq(name="solar_azimuth_angle"), + make_dsq(name="solar_zenith_angle") ], - name='I01', + name="I01", prerequisites=[], wavelength=(0.6, 0.64, 0.68), resolution=371, - calibration='reflectance', - modifiers=('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband'), - sensor='viirs', + calibration="reflectance", + modifiers=("sunz_corrected_iband", "rayleigh_corrected_crefl_iband"), + sensor="viirs", url=url, dem_sds=dem_sds, ) area, data = self.data_area_ref_corrector() - c01 = _make_viirs_xarray(data, area, 'I01', 'toa_bidirectional_reflectance', - wavelength=(0.6, 0.64, 0.68), units='%', - calibration='reflectance') - c02 = _make_viirs_xarray(data, area, 'satellite_azimuth_angle', 'sensor_azimuth_angle') + c01 = _make_viirs_xarray(data, area, "I01", "toa_bidirectional_reflectance", + wavelength=(0.6, 0.64, 0.68), units="%", + calibration="reflectance") + c02 = _make_viirs_xarray(data, area, "satellite_azimuth_angle", "sensor_azimuth_angle") c02.data = c02.data.rechunk((1, -1)) - c03 = _make_viirs_xarray(data, area, 'satellite_zenith_angle', 'sensor_zenith_angle') - c04 = _make_viirs_xarray(data, area, 'solar_azimuth_angle', 'solar_azimuth_angle') - c05 = _make_viirs_xarray(data, area, 'solar_zenith_angle', 'solar_zenith_angle') + c03 = _make_viirs_xarray(data, area, "satellite_zenith_angle", "sensor_zenith_angle") + c04 = _make_viirs_xarray(data, area, "solar_azimuth_angle", "solar_azimuth_angle") + c05 = _make_viirs_xarray(data, area, "solar_zenith_angle", "solar_zenith_angle") with dem_mock_cm(tmpdir, url): res = ref_cor([c01], [c02, c03, c04, c05]) diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py index 316192421c..f1385e9b18 100644 --- a/satpy/tests/modifier_tests/test_parallax.py +++ b/satpy/tests/modifier_tests/test_parallax.py @@ -30,8 +30,7 @@ from pyresample 
import create_area_def import satpy.resample - -from ...writers import get_enhanced_image +from satpy.writers import get_enhanced_image # NOTE: # The following fixtures are not defined in this file, but are used and injected by Pytest: @@ -40,7 +39,7 @@ # - request -@pytest.fixture +@pytest.fixture() def fake_tle(): """Produce fake Two Line Element (TLE) object from pyorbital.""" return pyorbital.tlefile.Tle( @@ -88,7 +87,7 @@ class TestForwardParallax: def test_get_parallax_corrected_lonlats_ssp(self): """Test that at SSP, parallax correction does nothing.""" - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = lon = lat = 0. height = 5000. # m sat_alt = 30_000_000. # m @@ -98,7 +97,7 @@ def test_get_parallax_corrected_lonlats_ssp(self): def test_get_parallax_corrected_lonlats_clearsky(self): """Test parallax correction for clearsky case (returns NaN).""" - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = 0 lat = np.linspace(-20, 20, 25).reshape(5, 5) lon = np.linspace(-20, 20, 25).reshape(5, 5).T @@ -110,11 +109,11 @@ def test_get_parallax_corrected_lonlats_clearsky(self): assert np.isnan(corr_lon).all() assert np.isnan(corr_lat).all() - @pytest.mark.parametrize("lat,lon", [(0, 0), (0, 40), (0, 179.9)]) + @pytest.mark.parametrize(("lat", "lon"), [(0, 0), (0, 40), (0, 179.9)]) @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_get_parallax_corrected_lonlats_cloudy_ssp(self, lat, lon, resolution): """Test parallax correction for fully cloudy scene at SSP.""" - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats N = 5 lats = np.linspace(lat-N*resolution, lat+N*resolution, 25).reshape(N, N) @@ -145,7 +144,7 @@ def test_get_parallax_corrected_lonlats_cloudy_ssp(self, lat, lon, resolution): def test_get_parallax_corrected_lonlats_cloudy_slant(self): """Test parallax correction for fully cloudy scene (not SSP).""" - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = 0 lat = np.linspace(-20, 20, 25).reshape(5, 5) lon = np.linspace(-20, 20, 25).reshape(5, 5).T @@ -161,7 +160,7 @@ def test_get_parallax_corrected_lonlats_cloudy_slant(self): def test_get_parallax_corrected_lonlats_mixed(self): """Test parallax correction for mixed cloudy case.""" - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lon = sat_lat = 0 sat_alt = 35_785_831.0 # m @@ -189,7 +188,7 @@ def test_get_parallax_corrected_lonlats_horizon(self): Test the rather unlikely case of a satellite elevation of exactly 0 """ - from ...modifiers.parallax import get_parallax_corrected_lonlats + from satpy.modifiers.parallax import get_parallax_corrected_lonlats sat_lat = sat_lon = lon = lat = 0. height = 5000. sat_alt = 30_000_000. 
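For reference on the test_reflectance_corrector_bad_prereqs hunk a little further up: pytest documents the match= keyword for the context-manager form of pytest.raises, while the legacy callable form forwards extra keyword arguments to the callable, so it is worth verifying that the three-argument calls there really check the message. Below is a minimal sketch of the context-manager equivalent; ref_cor here is a hypothetical stand-in, not the ReflectanceCorrector from the patch.

import pytest


def ref_cor(datasets, optional_datasets=None):
    # Hypothetical stand-in that fails the same way the modifier does when
    # the number of required and optional inputs does not add up.
    raise ValueError("Not sure how to handle provided dependencies.")


def test_bad_prereqs_sketch():
    # In the context-manager form, match= is applied to the exception message.
    with pytest.raises(ValueError, match="Not sure how to handle provided dependencies"):
        ref_cor([1], [2, 3, 4])
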
@@ -200,7 +199,7 @@ def test_get_parallax_corrected_lonlats_horizon(self): def test_get_surface_parallax_displacement(self): """Test surface parallax displacement.""" - from ...modifiers.parallax import get_surface_parallax_displacement + from satpy.modifiers.parallax import get_surface_parallax_displacement val = get_surface_parallax_displacement( 0, 0, 36_000_000, 0, 10, 10_000) @@ -215,18 +214,18 @@ class TestParallaxCorrectionClass: @pytest.mark.parametrize("resolution", [0.05, 1, 10]) def test_init_parallaxcorrection(self, center, sizes, resolution): """Test that ParallaxCorrection class can be instantiated.""" - from ...modifiers.parallax import ParallaxCorrection + from satpy.modifiers.parallax import ParallaxCorrection fake_area = _get_fake_areas(center, sizes, resolution)[0] pc = ParallaxCorrection(fake_area) assert pc.base_area == fake_area - @pytest.mark.parametrize("sat_pos,ar_pos", + @pytest.mark.parametrize(("sat_pos", "ar_pos"), [((0, 0), (0, 0)), ((0, 0), (40, 0))]) @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_correct_area_clearsky(self, sat_pos, ar_pos, resolution, caplog): """Test that ParallaxCorrection doesn't change clearsky geolocation.""" - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene (sat_lat, sat_lon) = sat_pos (ar_lat, ar_lon) = ar_pos small = 5 @@ -248,14 +247,14 @@ def test_correct_area_clearsky(self, sat_pos, ar_pos, resolution, caplog): new_area.get_lonlats(), fake_area_small.get_lonlats()) - @pytest.mark.parametrize("lat,lon", + @pytest.mark.parametrize(("lat", "lon"), [(0, 0), (0, 40), (0, 180), (90, 0)]) # relevant for Арктика satellites @pytest.mark.parametrize("resolution", [0.01, 0.5, 10]) def test_correct_area_ssp(self, lat, lon, resolution): """Test that ParallaxCorrection doesn't touch SSP.""" - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene codes = { (0, 0): 4326, (0, 40): 4326, @@ -298,8 +297,8 @@ def test_correct_area_ssp(self, lat, lon, resolution): @pytest.mark.parametrize("daskify", [False, True]) def test_correct_area_partlycloudy(self, daskify): """Test ParallaxCorrection for partly cloudy situation.""" - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene small = 5 large = 9 (fake_area_small, fake_area_large) = _get_fake_areas( @@ -346,11 +345,11 @@ def test_correct_area_partlycloudy(self, daskify): [49.86860622, 49.9097198, 49.90971976, 49.9097198, 49.88231496]]), rtol=1e-6) - @pytest.mark.parametrize("res1,res2", [(0.08, 0.3), (0.3, 0.08)]) + @pytest.mark.parametrize(("res1", "res2"), [(0.08, 0.3), (0.3, 0.08)]) def test_correct_area_clearsky_different_resolutions(self, res1, res2): """Test clearsky correction when areas have different resolutions.""" - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene # areas with different resolutions, but same coverage @@ -385,8 +384,8 @@ def test_correct_area_clearsky_different_resolutions(self, res1, res2): @pytest.mark.xfail(reason="awaiting pyresample fixes") def 
test_correct_area_cloudy_no_overlap(self, ): """Test cloudy correction when areas have no overlap.""" - from ...modifiers.parallax import MissingHeightError, ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import MissingHeightError, ParallaxCorrection + from satpy.tests.utils import make_fake_scene areas_00 = _get_fake_areas((0, 40), [5, 9], 0.1) areas_shift = _get_fake_areas((90, 20), [5, 9], 0.1) fake_area_small = areas_00[0] @@ -405,8 +404,8 @@ def test_correct_area_cloudy_no_overlap(self, ): @pytest.mark.xfail(reason="awaiting pyresample fixes") def test_correct_area_cloudy_partly_shifted(self, ): """Test cloudy correction when areas overlap only partly.""" - from ...modifiers.parallax import IncompleteHeightWarning, ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import IncompleteHeightWarning, ParallaxCorrection + from satpy.tests.utils import make_fake_scene areas_00 = _get_fake_areas((0, 40), [5, 9], 0.1) areas_shift = _get_fake_areas((0.5, 40), [5, 9], 0.1) fake_area_small = areas_00[0] @@ -426,8 +425,8 @@ def test_correct_area_cloudy_partly_shifted(self, ): def test_correct_area_cloudy_same_area(self, ): """Test cloudy correction when areas are the same.""" - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene area = _get_fake_areas((0, 0), [9], 0.1)[0] sc = make_fake_scene( @@ -446,8 +445,8 @@ def test_correct_area_no_orbital_parameters(self, caplog, fake_tle): on satellite location directly. Rather, they include platform name, sensor, start time, and end time, that we have to use instead. """ - from ...modifiers.parallax import ParallaxCorrection - from ..utils import make_fake_scene + from satpy.modifiers.parallax import ParallaxCorrection + from satpy.tests.utils import make_fake_scene small = 5 large = 9 (fake_area_small, fake_area_large) = _get_fake_areas( @@ -478,7 +477,7 @@ class TestParallaxCorrectionModifier: def test_parallax_modifier_interface(self): """Test the modifier interface.""" - from ...modifiers.parallax import ParallaxCorrectionModifier + from satpy.modifiers.parallax import ParallaxCorrectionModifier (area_small, area_large) = _get_fake_areas((0, 0), [5, 9], 0.1) fake_bt = xr.DataArray( np.linspace(220, 230, 25).reshape(5, 5), @@ -512,11 +511,11 @@ def test_parallax_modifier_interface_with_cloud(self): BT corresponding to full disk SEVIRI, and test that no strange speckles occur. """ - from ...modifiers.parallax import ParallaxCorrectionModifier + from satpy.modifiers.parallax import ParallaxCorrectionModifier w_cth = 25 h_cth = 15 - proj_dict = {'a': '6378137', 'h': '35785863', 'proj': 'geos', 'units': 'm'} + proj_dict = {"a": "6378137", "h": "35785863", "proj": "geos", "units": "m"} fake_area_cth = pyresample.create_area_def( area_id="test-area", projection=proj_dict, @@ -565,7 +564,7 @@ def test_parallax_modifier_interface_with_cloud(self): # do so after parallax correction assert not (res.diff("x") < 0).any() - @pytest.fixture + @pytest.fixture() def test_area(self, request): """Produce test area for parallax correction unit tests. 
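The fixture hunks in this file (and in test_blend.py below) switch bare @pytest.fixture decorators to the called form @pytest.fixture(). Both are accepted by pytest; the parenthesised form matches the pytest-style lint rule this changeset appears to standardise on (the preference is configurable). A self-contained sketch of the pattern, with a hypothetical fixture name:

import pytest


@pytest.fixture()
def fake_center():
    # Hypothetical fixture standing in for the fake-area fixtures used above.
    return (0.0, 0.0)


def test_fake_center(fake_center):
    assert fake_center == (0.0, 0.0)
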
@@ -623,7 +622,7 @@ def _get_fake_cloud_datasets(self, test_area, cth, use_dask): @pytest.mark.parametrize("test_area", ["foroyar", "ouagadougou"], indirect=["test_area"]) def test_modifier_interface_fog_no_shift(self, test_area): """Test that fog isn't masked or shifted.""" - from ...modifiers.parallax import ParallaxCorrectionModifier + from satpy.modifiers.parallax import ParallaxCorrectionModifier (fake_bt, fake_cth, _) = self._get_fake_cloud_datasets(test_area, 50, use_dask=False) @@ -647,7 +646,7 @@ def test_modifier_interface_cloud_moves_to_observer(self, cth, use_dask, test_ar With the modifier interface, use a high resolution area and test that pixels are moved in the direction of the observer and not away from it. """ - from ...modifiers.parallax import ParallaxCorrectionModifier + from satpy.modifiers.parallax import ParallaxCorrectionModifier (fake_bt, fake_cth, cma) = self._get_fake_cloud_datasets(test_area, cth, use_dask=use_dask) @@ -712,12 +711,12 @@ def test_modifier_interface_cloud_moves_to_observer(self, cth, use_dask, test_ar class TestParallaxCorrectionSceneLoad: """Test that scene load interface works as expected.""" - @pytest.fixture + @pytest.fixture() def yaml_code(self): """Return YAML code for parallax_corrected_VIS006.""" return _test_yaml_code - @pytest.fixture + @pytest.fixture() def conf_file(self, yaml_code, tmp_path): """Produce a fake configuration file.""" conf_file = tmp_path / "test.yaml" @@ -725,7 +724,7 @@ def conf_file(self, yaml_code, tmp_path): fp.write(yaml_code) return conf_file - @pytest.fixture + @pytest.fixture() def fake_scene(self, yaml_code): """Produce fake scene and prepare fake composite config.""" from satpy import Scene diff --git a/satpy/tests/multiscene_tests/test_blend.py b/satpy/tests/multiscene_tests/test_blend.py index 7140d98c8e..f9d7e35462 100644 --- a/satpy/tests/multiscene_tests/test_blend.py +++ b/satpy/tests/multiscene_tests/test_blend.py @@ -42,15 +42,15 @@ def _get_expected_stack_select(scene1: Scene, scene2: Scene) -> xr.DataArray: - expected = scene2['polar-ct'] - expected[..., NUM_TEST_ROWS, :] = scene1['geo-ct'][..., NUM_TEST_ROWS, :] - expected[..., :, NUM_TEST_COLS] = scene1['geo-ct'][..., :, NUM_TEST_COLS] - expected[..., -1, :] = scene1['geo-ct'][..., -1, :] + expected = scene2["polar-ct"] + expected[..., NUM_TEST_ROWS, :] = scene1["geo-ct"][..., NUM_TEST_ROWS, :] + expected[..., :, NUM_TEST_COLS] = scene1["geo-ct"][..., :, NUM_TEST_COLS] + expected[..., -1, :] = scene1["geo-ct"][..., -1, :] return expected.compute() def _get_expected_stack_blend(scene1: Scene, scene2: Scene) -> xr.DataArray: - expected = scene2['polar-ct'].copy().compute().astype(np.float64) + expected = scene2["polar-ct"].copy().compute().astype(np.float64) expected[..., NUM_TEST_ROWS, :] = 5 / 3 # (1*2 + 3*1) / (2 + 1) expected[..., :, NUM_TEST_COLS] = 5 / 3 expected[..., -1, :] = np.nan # (1*0 + 0*1) / (0 + 1) @@ -59,7 +59,7 @@ def _get_expected_stack_blend(scene1: Scene, scene2: Scene) -> xr.DataArray: return expected -@pytest.fixture +@pytest.fixture() def test_area(): """Get area definition used by test DataArrays.""" return _create_test_area() @@ -77,7 +77,7 @@ def image_mode(request): return request.param -@pytest.fixture +@pytest.fixture() def cloud_type_data_array1(test_area, data_type, image_mode): """Get DataArray for cloud type in the first test Scene.""" dsid1 = make_dataid( @@ -88,26 +88,26 @@ def cloud_type_data_array1(test_area, data_type, image_mode): shape = DEFAULT_SHAPE if len(image_mode) == 0 else (len(image_mode),) + 
DEFAULT_SHAPE dims = ("y", "x") if len(image_mode) == 0 else ("bands", "y", "x") if data_type is np.int8: - data_arr = _create_test_int8_dataset(name='geo-ct', shape=shape, area=test_area, values=1, dims=dims) + data_arr = _create_test_int8_dataset(name="geo-ct", shape=shape, area=test_area, values=1, dims=dims) else: - data_arr = _create_test_dataset(name='geo-ct', shape=shape, area=test_area, values=1.0, dims=dims) - - data_arr.attrs['platform_name'] = 'Meteosat-11' - data_arr.attrs['sensor'] = {'seviri'} - data_arr.attrs['units'] = '1' - data_arr.attrs['long_name'] = 'NWC GEO CT Cloud Type' - data_arr.attrs['orbital_parameters'] = { - 'satellite_nominal_altitude': 35785863.0, - 'satellite_nominal_longitude': 0.0, - 'satellite_nominal_latitude': 0, + data_arr = _create_test_dataset(name="geo-ct", shape=shape, area=test_area, values=1.0, dims=dims) + + data_arr.attrs["platform_name"] = "Meteosat-11" + data_arr.attrs["sensor"] = {"seviri"} + data_arr.attrs["units"] = "1" + data_arr.attrs["long_name"] = "NWC GEO CT Cloud Type" + data_arr.attrs["orbital_parameters"] = { + "satellite_nominal_altitude": 35785863.0, + "satellite_nominal_longitude": 0.0, + "satellite_nominal_latitude": 0, } - data_arr.attrs['start_time'] = datetime(2023, 1, 16, 11, 9, 17) - data_arr.attrs['end_time'] = datetime(2023, 1, 16, 11, 12, 22) + data_arr.attrs["start_time"] = datetime(2023, 1, 16, 11, 9, 17) + data_arr.attrs["end_time"] = datetime(2023, 1, 16, 11, 12, 22) data_arr.attrs["_satpy_id"] = dsid1 return data_arr -@pytest.fixture +@pytest.fixture() def cloud_type_data_array2(test_area, data_type, image_mode): """Get DataArray for cloud type in the second test Scene.""" dsid1 = make_dataid( @@ -118,22 +118,22 @@ def cloud_type_data_array2(test_area, data_type, image_mode): shape = DEFAULT_SHAPE if len(image_mode) == 0 else (len(image_mode),) + DEFAULT_SHAPE dims = ("y", "x") if len(image_mode) == 0 else ("bands", "y", "x") if data_type is np.int8: - data_arr = _create_test_int8_dataset(name='polar-ct', shape=shape, area=test_area, values=3, dims=dims) - data_arr[..., -1, :] = data_arr.attrs['_FillValue'] + data_arr = _create_test_int8_dataset(name="polar-ct", shape=shape, area=test_area, values=3, dims=dims) + data_arr[..., -1, :] = data_arr.attrs["_FillValue"] else: - data_arr = _create_test_dataset(name='polar-ct', shape=shape, area=test_area, values=3.0, dims=dims) + data_arr = _create_test_dataset(name="polar-ct", shape=shape, area=test_area, values=3.0, dims=dims) data_arr[..., -1, :] = np.nan - data_arr.attrs['platform_name'] = 'NOAA-18' - data_arr.attrs['sensor'] = {'avhrr-3'} - data_arr.attrs['units'] = '1' - data_arr.attrs['long_name'] = 'SAFNWC PPS CT Cloud Type' - data_arr.attrs['start_time'] = datetime(2023, 1, 16, 11, 12, 57, 500000) - data_arr.attrs['end_time'] = datetime(2023, 1, 16, 11, 28, 1, 900000) + data_arr.attrs["platform_name"] = "NOAA-18" + data_arr.attrs["sensor"] = {"avhrr-3"} + data_arr.attrs["units"] = "1" + data_arr.attrs["long_name"] = "SAFNWC PPS CT Cloud Type" + data_arr.attrs["start_time"] = datetime(2023, 1, 16, 11, 12, 57, 500000) + data_arr.attrs["end_time"] = datetime(2023, 1, 16, 11, 28, 1, 900000) data_arr.attrs["_satpy_id"] = dsid1 return data_arr -@pytest.fixture +@pytest.fixture() def scene1_with_weights(cloud_type_data_array1, test_area): """Create first test scene with a dataset of weights.""" from satpy import Scene @@ -141,7 +141,7 @@ def scene1_with_weights(cloud_type_data_array1, test_area): scene = Scene() scene[cloud_type_data_array1.attrs["_satpy_id"]] = 
cloud_type_data_array1 - wgt1 = _create_test_dataset(name='geo-ct-wgt', area=test_area, values=0) + wgt1 = _create_test_dataset(name="geo-ct-wgt", area=test_area, values=0) wgt1[NUM_TEST_ROWS, :] = 2 wgt1[:, NUM_TEST_COLS] = 2 @@ -151,16 +151,16 @@ def scene1_with_weights(cloud_type_data_array1, test_area): resolution=3000, modifiers=() ) - scene[dsid2] = _create_test_int8_dataset(name='geo-cma', area=test_area, values=2) - scene[dsid2].attrs['start_time'] = datetime(2023, 1, 16, 11, 9, 17) - scene[dsid2].attrs['end_time'] = datetime(2023, 1, 16, 11, 12, 22) + scene[dsid2] = _create_test_int8_dataset(name="geo-cma", area=test_area, values=2) + scene[dsid2].attrs["start_time"] = datetime(2023, 1, 16, 11, 9, 17) + scene[dsid2].attrs["end_time"] = datetime(2023, 1, 16, 11, 12, 22) - wgt2 = _create_test_dataset(name='geo-cma-wgt', area=test_area, values=0) + wgt2 = _create_test_dataset(name="geo-cma-wgt", area=test_area, values=0) return scene, [wgt1, wgt2] -@pytest.fixture +@pytest.fixture() def scene2_with_weights(cloud_type_data_array2, test_area): """Create second test scene.""" from satpy import Scene @@ -168,22 +168,22 @@ def scene2_with_weights(cloud_type_data_array2, test_area): scene = Scene() scene[cloud_type_data_array2.attrs["_satpy_id"]] = cloud_type_data_array2 - wgt1 = _create_test_dataset(name='polar-ct-wgt', area=test_area, values=1) + wgt1 = _create_test_dataset(name="polar-ct-wgt", area=test_area, values=1) dsid2 = make_dataid( name="polar-cma", resolution=1000, modifiers=() ) - scene[dsid2] = _create_test_int8_dataset(name='polar-cma', area=test_area, values=4) - scene[dsid2].attrs['start_time'] = datetime(2023, 1, 16, 11, 12, 57, 500000) - scene[dsid2].attrs['end_time'] = datetime(2023, 1, 16, 11, 28, 1, 900000) + scene[dsid2] = _create_test_int8_dataset(name="polar-cma", area=test_area, values=4) + scene[dsid2].attrs["start_time"] = datetime(2023, 1, 16, 11, 12, 57, 500000) + scene[dsid2].attrs["end_time"] = datetime(2023, 1, 16, 11, 28, 1, 900000) - wgt2 = _create_test_dataset(name='polar-cma-wgt', area=test_area, values=1) + wgt2 = _create_test_dataset(name="polar-cma-wgt", area=test_area, values=1) return scene, [wgt1, wgt2] -@pytest.fixture +@pytest.fixture() def multi_scene_and_weights(scene1_with_weights, scene2_with_weights): """Create small multi-scene for testing.""" from satpy import MultiScene @@ -193,12 +193,12 @@ def multi_scene_and_weights(scene1_with_weights, scene2_with_weights): return MultiScene([scene1, scene2]), [weights1, weights2] -@pytest.fixture +@pytest.fixture() def groups(): """Get group definitions for the MultiScene.""" return { - DataQuery(name='CloudType'): ['geo-ct', 'polar-ct'], - DataQuery(name='CloudMask'): ['geo-cma', 'polar-cma'] + DataQuery(name="CloudType"): ["geo-ct", "polar-ct"], + DataQuery(name="CloudMask"): ["geo-cma", "polar-cma"] } @@ -216,15 +216,15 @@ def test_blend_two_scenes_using_stack(self, multi_scene_and_weights, groups, resampled = multi_scene stacked = resampled.blend(blend_function=stack) - result = stacked['CloudType'].compute() + result = stacked["CloudType"].compute() - expected = scene2['polar-ct'].copy() - expected[..., -1, :] = scene1['geo-ct'][..., -1, :] + expected = scene2["polar-ct"].copy() + expected[..., -1, :] = scene1["geo-ct"][..., -1, :] xr.testing.assert_equal(result, expected.compute()) _check_stacked_metadata(result, "CloudType") - assert result.attrs['start_time'] == datetime(2023, 1, 16, 11, 9, 17) - assert result.attrs['end_time'] == datetime(2023, 1, 16, 11, 28, 1, 900000) + assert 
result.attrs["start_time"] == datetime(2023, 1, 16, 11, 9, 17) + assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 28, 1, 900000) def test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups): """Test exception is raised when bad 'blend_type' is used.""" @@ -232,11 +232,11 @@ def test_blend_two_scenes_bad_blend_type(self, multi_scene_and_weights, groups): multi_scene, weights = multi_scene_and_weights - simple_groups = {DataQuery(name='CloudType'): groups[DataQuery(name='CloudType')]} + simple_groups = {DataQuery(name="CloudType"): groups[DataQuery(name="CloudType")]} multi_scene.group(simple_groups) weights = [weights[0][0], weights[1][0]] stack_func = partial(stack, weights=weights, blend_type="i_dont_exist") - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unknown weighted blending type: .*.Expected one of: .*"): multi_scene.blend(blend_function=stack_func) @pytest.mark.parametrize( @@ -262,7 +262,7 @@ def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, gr scene1, weights1 = scene1_with_weights scene2, weights2 = scene2_with_weights - simple_groups = {DataQuery(name='CloudType'): groups[DataQuery(name='CloudType')]} + simple_groups = {DataQuery(name="CloudType"): groups[DataQuery(name="CloudType")]} multi_scene.group(simple_groups) weights = [weights[0][0], weights[1][0]] @@ -270,52 +270,52 @@ def test_blend_two_scenes_using_stack_weighted(self, multi_scene_and_weights, gr weighted_blend = multi_scene.blend(blend_function=stack_func) expected = exp_result_func(scene1, scene2) - result = weighted_blend['CloudType'].compute() + result = weighted_blend["CloudType"].compute() # result has NaNs and xarray's xr.testing.assert_equal doesn't support NaN comparison np.testing.assert_allclose(result.data, expected.data) _check_stacked_metadata(result, "CloudType") if combine_times: - assert result.attrs['start_time'] == datetime(2023, 1, 16, 11, 9, 17) - assert result.attrs['end_time'] == datetime(2023, 1, 16, 11, 28, 1, 900000) + assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 9, 17) + assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 28, 1, 900000) else: - assert result.attrs['start_time'] == datetime(2023, 1, 16, 11, 11, 7, 250000) - assert result.attrs['end_time'] == datetime(2023, 1, 16, 11, 20, 11, 950000) + assert result.attrs["start_time"] == datetime(2023, 1, 16, 11, 11, 7, 250000) + assert result.attrs["end_time"] == datetime(2023, 1, 16, 11, 20, 11, 950000) - @pytest.fixture + @pytest.fixture() def datasets_and_weights(self): """X-Array datasets with area definition plus weights for input to tests.""" shape = (8, 12) - area = AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + area = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, shape[1], shape[0], [-200, -200, 200, 200]) - ds1 = xr.DataArray(da.ones(shape, chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) - ds2 = xr.DataArray(da.ones(shape, chunks=-1) * 2, dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 1, 0, 0), 'area': area}) - ds3 = xr.DataArray(da.ones(shape, chunks=-1) * 3, dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 1, 0, 0), 'area': area}) - - ds4 = xr.DataArray(da.zeros(shape, chunks=-1), dims=('y', 'time'), - attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) - ds5 = xr.DataArray(da.zeros(shape, chunks=-1), dims=('y', 'time'), - 
attrs={'start_time': datetime(2018, 1, 1, 1, 0, 0), 'area': area}) - - wgt1 = xr.DataArray(da.ones(shape, chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) - wgt2 = xr.DataArray(da.zeros(shape, chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) - wgt3 = xr.DataArray(da.zeros(shape, chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1, 0, 0, 0), 'area': area}) - - datastruct = {'shape': shape, - 'area': area, - 'datasets': [ds1, ds2, ds3, ds4, ds5], - 'weights': [wgt1, wgt2, wgt3]} + ds1 = xr.DataArray(da.ones(shape, chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + ds2 = xr.DataArray(da.ones(shape, chunks=-1) * 2, dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area}) + ds3 = xr.DataArray(da.ones(shape, chunks=-1) * 3, dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area}) + + ds4 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "time"), + attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + ds5 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "time"), + attrs={"start_time": datetime(2018, 1, 1, 1, 0, 0), "area": area}) + + wgt1 = xr.DataArray(da.ones(shape, chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + wgt2 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + wgt3 = xr.DataArray(da.zeros(shape, chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1, 0, 0, 0), "area": area}) + + datastruct = {"shape": shape, + "area": area, + "datasets": [ds1, ds2, ds3, ds4, ds5], + "weights": [wgt1, wgt2, wgt3]} return datastruct - @pytest.mark.parametrize(('line', 'column',), + @pytest.mark.parametrize(("line", "column",), [(2, 3), (4, 5)] ) def test_blend_function_stack_weighted(self, datasets_and_weights, line, column): @@ -326,19 +326,19 @@ def test_blend_function_stack_weighted(self, datasets_and_weights, line, column) input_data = datasets_and_weights - input_data['weights'][1][line, :] = 2 - input_data['weights'][2][:, column] = 2 + input_data["weights"][1][line, :] = 2 + input_data["weights"][2][:, column] = 2 - stack_with_weights = partial(stack, weights=input_data['weights'], combine_times=False) - blend_result = stack_with_weights(input_data['datasets'][0:3]) + stack_with_weights = partial(stack, weights=input_data["weights"], combine_times=False) + blend_result = stack_with_weights(input_data["datasets"][0:3]) - ds1 = input_data['datasets'][0] - ds2 = input_data['datasets'][1] - ds3 = input_data['datasets'][2] + ds1 = input_data["datasets"][0] + ds2 = input_data["datasets"][1] + ds3 = input_data["datasets"][2] expected = ds1.copy() expected[:, column] = ds3[:, column] expected[line, :] = ds2[line, :] - expected.attrs = combine_metadata(*[x.attrs for x in input_data['datasets'][0:3]]) + expected.attrs = combine_metadata(*[x.attrs for x in input_data["datasets"][0:3]]) xr.testing.assert_equal(blend_result.compute(), expected.compute()) assert expected.attrs == blend_result.attrs @@ -347,8 +347,8 @@ def test_blend_function_stack(self, datasets_and_weights): """Test the 'stack' function.""" input_data = datasets_and_weights - ds1 = input_data['datasets'][0] - ds2 = input_data['datasets'][1] + ds1 = input_data["datasets"][0] + ds2 = input_data["datasets"][1] res = stack([ds1, ds2]) expected = ds2.copy() 
@@ -361,11 +361,11 @@ def test_timeseries(self, datasets_and_weights): """Test the 'timeseries' function.""" input_data = datasets_and_weights - ds1 = input_data['datasets'][0] - ds2 = input_data['datasets'][1] - ds4 = input_data['datasets'][2] - ds4 = input_data['datasets'][3] - ds5 = input_data['datasets'][4] + ds1 = input_data["datasets"][0] + ds2 = input_data["datasets"][1] + ds4 = input_data["datasets"][2] + ds4 = input_data["datasets"][3] + ds5 = input_data["datasets"][4] res = timeseries([ds1, ds2]) res2 = timeseries([ds4, ds5]) @@ -376,41 +376,41 @@ def test_timeseries(self, datasets_and_weights): def _check_stacked_metadata(data_arr: xr.DataArray, exp_name: str) -> None: - assert data_arr.attrs['units'] == '1' - assert data_arr.attrs['name'] == exp_name + assert data_arr.attrs["units"] == "1" + assert data_arr.attrs["name"] == exp_name if "_FillValue" in data_arr.attrs: - assert data_arr.attrs['_FillValue'] == 255 - assert data_arr.attrs['valid_range'] == [1, 15] + assert data_arr.attrs["_FillValue"] == 255 + assert data_arr.attrs["valid_range"] == [1, 15] expected_area = _create_test_area() - assert data_arr.attrs['area'] == expected_area + assert data_arr.attrs["area"] == expected_area # these metadata items don't match between all inputs - assert 'sensor' not in data_arr.attrs - assert 'platform_name' not in data_arr.attrs - assert 'long_name' not in data_arr.attrs + assert "sensor" not in data_arr.attrs + assert "platform_name" not in data_arr.attrs + assert "long_name" not in data_arr.attrs class TestTemporalRGB: """Test the temporal RGB blending method.""" - @pytest.fixture + @pytest.fixture() def nominal_data(self): """Return the input arrays for the nominal use case.""" - da1 = xr.DataArray([1, 0, 0], attrs={'start_time': datetime(2023, 5, 22, 9, 0, 0)}) - da2 = xr.DataArray([0, 1, 0], attrs={'start_time': datetime(2023, 5, 22, 10, 0, 0)}) - da3 = xr.DataArray([0, 0, 1], attrs={'start_time': datetime(2023, 5, 22, 11, 0, 0)}) + da1 = xr.DataArray([1, 0, 0], attrs={"start_time": datetime(2023, 5, 22, 9, 0, 0)}) + da2 = xr.DataArray([0, 1, 0], attrs={"start_time": datetime(2023, 5, 22, 10, 0, 0)}) + da3 = xr.DataArray([0, 0, 1], attrs={"start_time": datetime(2023, 5, 22, 11, 0, 0)}) return [da1, da2, da3] - @pytest.fixture + @pytest.fixture() def expected_result(self): """Return the expected result arrays.""" return [[1, 0, 0], [0, 1, 0], [0, 0, 1]] @staticmethod def _assert_results(res, expected_start_time, expected_result): - assert res.attrs['start_time'] == expected_start_time + assert res.attrs["start_time"] == expected_start_time np.testing.assert_equal(res.data[0, :], expected_result[0]) np.testing.assert_equal(res.data[1, :], expected_result[1]) np.testing.assert_equal(res.data[2, :], expected_result[2]) @@ -421,14 +421,14 @@ def test_nominal(self, nominal_data, expected_result): res = temporal_rgb(nominal_data) - self._assert_results(res, nominal_data[-1].attrs['start_time'], expected_result) + self._assert_results(res, nominal_data[-1].attrs["start_time"], expected_result) def test_extra_datasets(self, nominal_data, expected_result): """Test that only the first three arrays affect the usage.""" from satpy.multiscene import temporal_rgb - da4 = xr.DataArray([0, 0, 1], attrs={'start_time': datetime(2023, 5, 22, 12, 0, 0)}) + da4 = xr.DataArray([0, 0, 1], attrs={"start_time": datetime(2023, 5, 22, 12, 0, 0)}) res = temporal_rgb(nominal_data + [da4,]) - self._assert_results(res, nominal_data[-1].attrs['start_time'], expected_result) + self._assert_results(res, 
nominal_data[-1].attrs["start_time"], expected_result) diff --git a/satpy/tests/multiscene_tests/test_misc.py b/satpy/tests/multiscene_tests/test_misc.py index 0cfedf226f..9f6e400e31 100644 --- a/satpy/tests/multiscene_tests/test_misc.py +++ b/satpy/tests/multiscene_tests/test_misc.py @@ -49,28 +49,26 @@ def test_properties(self): area = _create_test_area() scenes = _create_test_scenes(area=area) - ds1_id = make_dataid(name='ds1') - ds2_id = make_dataid(name='ds2') - ds3_id = make_dataid(name='ds3') - ds4_id = make_dataid(name='ds4') + ds1_id = make_dataid(name="ds1") + ds2_id = make_dataid(name="ds2") + ds3_id = make_dataid(name="ds3") + ds4_id = make_dataid(name="ds4") # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") mscn = MultiScene(scenes) - self.assertSetEqual(mscn.loaded_dataset_ids, - {ds1_id, ds2_id, ds3_id}) - self.assertSetEqual(mscn.shared_dataset_ids, {ds1_id, ds2_id}) - self.assertTrue(mscn.all_same_area) + assert mscn.loaded_dataset_ids == {ds1_id, ds2_id, ds3_id} + assert mscn.shared_dataset_ids == {ds1_id, ds2_id} + assert mscn.all_same_area bigger_area = _create_test_area(shape=(20, 40)) - scenes[0]['ds4'] = _create_test_dataset('ds4', shape=(20, 40), + scenes[0]["ds4"] = _create_test_dataset("ds4", shape=(20, 40), area=bigger_area) - self.assertSetEqual(mscn.loaded_dataset_ids, - {ds1_id, ds2_id, ds3_id, ds4_id}) - self.assertSetEqual(mscn.shared_dataset_ids, {ds1_id, ds2_id}) - self.assertFalse(mscn.all_same_area) + assert mscn.loaded_dataset_ids == {ds1_id, ds2_id, ds3_id, ds4_id} + assert mscn.shared_dataset_ids == {ds1_id, ds2_id} + assert not mscn.all_same_area def test_from_files(self): """Test creating a multiscene from multiple files.""" @@ -93,14 +91,14 @@ def test_from_files(self): "OR_GLM-L2-GLMC-M3_G16_s20171171506000_e20171171507000_c20380190314080.nc", "OR_GLM-L2-GLMC-M3_G16_s20171171507000_e20171171508000_c20380190314080.nc", ] - with mock.patch('satpy.multiscene._multiscene.Scene') as scn_mock: + with mock.patch("satpy.multiscene._multiscene.Scene") as scn_mock: mscn = MultiScene.from_files( input_files_abi, - reader='abi_l1b', + reader="abi_l1b", scene_kwargs={"reader_kwargs": {}}) assert len(mscn.scenes) == 6 calls = [mock.call( - filenames={'abi_l1b': [in_file_abi]}, + filenames={"abi_l1b": [in_file_abi]}, reader_kwargs={}) for in_file_abi in input_files_abi] scn_mock.assert_has_calls(calls) @@ -109,13 +107,13 @@ def test_from_files(self): with pytest.warns(DeprecationWarning): mscn = MultiScene.from_files( input_files_abi + input_files_glm, - reader=('abi_l1b', "glm_l2"), + reader=("abi_l1b", "glm_l2"), group_keys=["start_time"], ensure_all_readers=True, time_threshold=30) assert len(mscn.scenes) == 2 calls = [mock.call( - filenames={'abi_l1b': [in_file_abi], 'glm_l2': [in_file_glm]}) + filenames={"abi_l1b": [in_file_abi], "glm_l2": [in_file_glm]}) for (in_file_abi, in_file_glm) in zip(input_files_abi[0:2], [input_files_glm[2]] + [input_files_glm[7]])] @@ -123,7 +121,7 @@ def test_from_files(self): scn_mock.reset_mock() mscn = MultiScene.from_files( input_files_abi + input_files_glm, - reader=('abi_l1b', "glm_l2"), + reader=("abi_l1b", "glm_l2"), group_keys=["start_time"], ensure_all_readers=False, time_threshold=30) @@ -133,7 +131,7 @@ def test_from_files(self): class TestMultiSceneGrouping: """Test dataset grouping in MultiScene.""" - @pytest.fixture + @pytest.fixture() def scene1(self): """Create first test scene.""" from satpy import Scene @@ -144,17 +142,17 
@@ def scene1(self): wavelength=(1, 2, 3), polarization="H" ) - scene[dsid1] = _create_test_dataset(name='ds1') + scene[dsid1] = _create_test_dataset(name="ds1") dsid2 = make_dataid( name="ds2", resolution=456, wavelength=(4, 5, 6), polarization="V" ) - scene[dsid2] = _create_test_dataset(name='ds2') + scene[dsid2] = _create_test_dataset(name="ds2") return scene - @pytest.fixture + @pytest.fixture() def scene2(self): """Create second test scene.""" from satpy import Scene @@ -165,28 +163,28 @@ def scene2(self): wavelength=(1.1, 2.1, 3.1), polarization="H" ) - scene[dsid1] = _create_test_dataset(name='ds3') + scene[dsid1] = _create_test_dataset(name="ds3") dsid2 = make_dataid( name="ds4", resolution=456.1, wavelength=(4.1, 5.1, 6.1), polarization="V" ) - scene[dsid2] = _create_test_dataset(name='ds4') + scene[dsid2] = _create_test_dataset(name="ds4") return scene - @pytest.fixture + @pytest.fixture() def multi_scene(self, scene1, scene2): """Create small multi scene for testing.""" from satpy import MultiScene return MultiScene([scene1, scene2]) - @pytest.fixture + @pytest.fixture() def groups(self): """Get group definitions for the MultiScene.""" return { - DataQuery(name='odd'): ['ds1', 'ds3'], - DataQuery(name='even'): ['ds2', 'ds4'] + DataQuery(name="odd"): ["ds1", "ds3"], + DataQuery(name="even"): ["ds2", "ds4"] } def test_multi_scene_grouping(self, multi_scene, groups, scene1): @@ -194,12 +192,12 @@ def test_multi_scene_grouping(self, multi_scene, groups, scene1): multi_scene.group(groups) shared_ids_exp = {make_dataid(name="odd"), make_dataid(name="even")} assert multi_scene.shared_dataset_ids == shared_ids_exp - assert DataQuery(name='odd') not in scene1 + assert DataQuery(name="odd") not in scene1 xr.testing.assert_allclose(multi_scene.scenes[0]["ds1"], scene1["ds1"]) def test_fails_to_add_multiple_datasets_from_the_same_scene_to_a_group(self, multi_scene): """Test that multiple datasets from the same scene in one group fails.""" - groups = {DataQuery(name='mygroup'): ['ds1', 'ds2']} + groups = {DataQuery(name="mygroup"): ["ds1", "ds2"]} multi_scene.group(groups) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Cannot add multiple datasets from a scene to the same group"): next(multi_scene.scenes) diff --git a/satpy/tests/multiscene_tests/test_save_animation.py b/satpy/tests/multiscene_tests/test_save_animation.py index 5e5b4a1d63..6807446bbb 100644 --- a/satpy/tests/multiscene_tests/test_save_animation.py +++ b/satpy/tests/multiscene_tests/test_save_animation.py @@ -50,7 +50,7 @@ def tearDown(self): except OSError: pass - @mock.patch('satpy.multiscene._multiscene.get_enhanced_image', _fake_get_enhanced_image) + @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_mp4_distributed(self): """Save a series of fake scenes to an mp4 video.""" from satpy import MultiScene @@ -58,61 +58,61 @@ def test_save_mp4_distributed(self): scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) - if ds_id == 'ds3': + for ds_id in ["ds1", "ds2", "ds3"]: + scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + if ds_id == "ds3": continue - 
scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, - 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') + "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x - with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer: + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them - mscn.save_animation(fn, client=client_mock, datasets=['ds1', 'ds2', 'ds3']) + mscn.save_animation(fn, client=client_mock, datasets=["ds1", "ds2", "ds3"]) # 2 saves for the first scene + 1 black frame # 3 for the second scene - self.assertEqual(writer_mock.append_data.call_count, 3 + 3) + assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4') + assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" + assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" + assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" # Test no distributed client found mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, - 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') + "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x - with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer, \ - mock.patch('satpy.multiscene._multiscene.get_client', mock.Mock(side_effect=ValueError("No client"))): + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer, \ + mock.patch("satpy.multiscene._multiscene.get_client", mock.Mock(side_effect=ValueError("No client"))): get_writer.return_value = writer_mock # force order of datasets by specifying them - mscn.save_animation(fn, datasets=['ds1', 'ds2', 'ds3']) + mscn.save_animation(fn, datasets=["ds1", "ds2", "ds3"]) # 2 saves for the first scene + 1 black frame # 3 for the second scene - self.assertEqual(writer_mock.append_data.call_count, 3 + 3) + assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4') + assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" + assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" + assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" - 
@mock.patch('satpy.multiscene._multiscene.get_enhanced_image', _fake_get_enhanced_image) + @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_mp4_no_distributed(self): """Save a series of fake scenes to an mp4 video when distributed isn't available.""" from satpy import MultiScene @@ -120,39 +120,39 @@ def test_save_mp4_no_distributed(self): scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) - if ds_id == 'ds3': + for ds_id in ["ds1", "ds2", "ds3"]: + scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + if ds_id == "ds3": continue - scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = os.path.join( self.base_dir, - 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') + "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v.compute() for v in x) client_mock.gather.side_effect = lambda x: x - with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer, \ - mock.patch('satpy.multiscene._multiscene.get_client', None): + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer, \ + mock.patch("satpy.multiscene._multiscene.get_client", None): get_writer.return_value = writer_mock # force order of datasets by specifying them - mscn.save_animation(fn, datasets=['ds1', 'ds2', 'ds3']) + mscn.save_animation(fn, datasets=["ds1", "ds2", "ds3"]) # 2 saves for the first scene + 1 black frame # 3 for the second scene - self.assertEqual(writer_mock.append_data.call_count, 3 + 3) + assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - self.assertEqual(filenames[0], 'test_save_mp4_ds1_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[1], 'test_save_mp4_ds2_20180101_00_20180102_12.mp4') - self.assertEqual(filenames[2], 'test_save_mp4_ds3_20180102_00_20180102_12.mp4') + assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" + assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" + assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" - @mock.patch('satpy.multiscene._multiscene.get_enhanced_image', _fake_get_enhanced_image) + @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_datasets_simple(self): """Save a series of fake scenes to PNG images.""" from satpy import MultiScene @@ -160,30 +160,30 @@ def test_save_datasets_simple(self): scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) -
scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) - if ds_id == 'ds3': + for ds_id in ["ds1", "ds2", "ds3"]: + scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + if ds_id == "ds3": continue - scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() client_mock.compute.side_effect = lambda x: tuple(v for v in x) client_mock.gather.side_effect = lambda x: x - with mock.patch('satpy.multiscene._multiscene.Scene.save_datasets') as save_datasets: + with mock.patch("satpy.multiscene._multiscene.Scene.save_datasets") as save_datasets: save_datasets.return_value = [True] # some arbitrary return value # force order of datasets by specifying them - mscn.save_datasets(base_dir=self.base_dir, client=False, datasets=['ds1', 'ds2', 'ds3'], - writer='simple_image') + mscn.save_datasets(base_dir=self.base_dir, client=False, datasets=["ds1", "ds2", "ds3"], + writer="simple_image") # 2 for each scene - self.assertEqual(save_datasets.call_count, 2) + assert save_datasets.call_count == 2 - @mock.patch('satpy.multiscene._multiscene.get_enhanced_image', _fake_get_enhanced_image) + @mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_datasets_distributed_delayed(self): """Test distributed save for writers returning delayed objects e.g. simple_image.""" from dask.delayed import Delayed @@ -193,15 +193,15 @@ def test_save_datasets_distributed_delayed(self): scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) - if ds_id == 'ds3': + for ds_id in ["ds1", "ds2", "ds3"]: + scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + if ds_id == "ds3": continue - scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() @@ -209,16 +209,16 @@ def test_save_datasets_distributed_delayed(self): client_mock.gather.side_effect = lambda x: x future_mock = mock.MagicMock() future_mock.__class__ = Delayed - with mock.patch('satpy.multiscene._multiscene.Scene.save_datasets') as save_datasets: + with mock.patch("satpy.multiscene._multiscene.Scene.save_datasets") as save_datasets: save_datasets.return_value = [future_mock] # some arbitrary return value # force order of datasets by specifying them - mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=['ds1', 'ds2', 'ds3'], - writer='simple_image') + mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=["ds1", "ds2", "ds3"], + writer="simple_image") # 2 for each scene - self.assertEqual(save_datasets.call_count, 2) + assert save_datasets.call_count == 2 - @mock.patch('satpy.multiscene._multiscene.get_enhanced_image', _fake_get_enhanced_image) +
@mock.patch("satpy.multiscene._multiscene.get_enhanced_image", _fake_get_enhanced_image) def test_save_datasets_distributed_source_target(self): """Test distributed save for writers returning sources and targets e.g. geotiff writer.""" import dask.array as da @@ -228,15 +228,15 @@ def test_save_datasets_distributed_source_target(self): scenes = _create_test_scenes(area=area) # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) - if ds_id == 'ds3': + for ds_id in ["ds1", "ds2", "ds3"]: + scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + if ds_id == "ds3": continue - scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) client_mock = mock.MagicMock() @@ -245,12 +245,12 @@ def test_save_datasets_distributed_source_target(self): source_mock = mock.MagicMock() source_mock.__class__ = da.Array target_mock = mock.MagicMock() - with mock.patch('satpy.multiscene._multiscene.Scene.save_datasets') as save_datasets: + with mock.patch("satpy.multiscene._multiscene.Scene.save_datasets") as save_datasets: save_datasets.return_value = [(source_mock, target_mock)] # some arbitrary return value # force order of datasets by specifying them with self.assertRaises(NotImplementedError): - mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=['ds1', 'ds2', 'ds3'], - writer='geotiff') + mscn.save_datasets(base_dir=self.base_dir, client=client_mock, datasets=["ds1", "ds2", "ds3"], + writer="geotiff") def test_crop(self): """Test the crop method.""" @@ -262,44 +262,44 @@ def test_crop(self): scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) - proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, - 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} + proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m"} x_size = 3712 y_size = 3712 area_def = AreaDefinition( - 'test', 'test', 'test', + "test", "test", "test", proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( - 'test2', 'test2', 'test2', proj_dict, + "test2", "test2", "test2", proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = DataArray(np.zeros((y_size, x_size))) - scene1["2"] = DataArray(np.zeros((y_size, x_size)), dims=('y', 'x')) - scene1["3"] = DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'), - attrs={'area': area_def}) - scene1["4"] = DataArray(np.zeros((y_size // 2, x_size // 2)), dims=('y', 'x'), - attrs={'area': area_def2}) + scene1["2"] = DataArray(np.zeros((y_size, x_size)), dims=("y", "x")) + scene1["3"] = DataArray(np.zeros((y_size, x_size)), dims=("y", "x"), + attrs={"area": area_def}) + scene1["4"] = DataArray(np.zeros((y_size // 2, x_size // 2)), dims=("y", "x"), + attrs={"area": area_def2}) mscn = MultiScene([scene1]) # by lon/lat bbox new_mscn = mscn.crop(ll_bbox=(-20., -5., 0, 0)) new_scn1 = list(new_mscn.scenes)[0] - self.assertIn('1', new_scn1) - self.assertIn('2', new_scn1) - self.assertIn('3', new_scn1) - 
self.assertTupleEqual(new_scn1['1'].shape, (y_size, x_size)) - self.assertTupleEqual(new_scn1['2'].shape, (y_size, x_size)) - self.assertTupleEqual(new_scn1['3'].shape, (184, 714)) - self.assertTupleEqual(new_scn1['4'].shape, (92, 357)) + assert "1" in new_scn1 + assert "2" in new_scn1 + assert "3" in new_scn1 + assert new_scn1["1"].shape == (y_size, x_size) + assert new_scn1["2"].shape == (y_size, x_size) + assert new_scn1["3"].shape == (184, 714) + assert new_scn1["4"].shape == (92, 357) -@mock.patch('satpy.multiscene._multiscene.get_enhanced_image') +@mock.patch("satpy.multiscene._multiscene.get_enhanced_image") def test_save_mp4(smg, tmp_path): """Save a series of fake scenes to an mp4 video.""" from satpy import MultiScene @@ -308,38 +308,38 @@ def test_save_mp4(smg, tmp_path): smg.side_effect = _fake_get_enhanced_image # Add a dataset to only one of the Scenes - scenes[1]['ds3'] = _create_test_dataset('ds3') + scenes[1]["ds3"] = _create_test_dataset("ds3") # Add a start and end time - for ds_id in ['ds1', 'ds2', 'ds3']: - scenes[1][ds_id].attrs['start_time'] = datetime(2018, 1, 2) - scenes[1][ds_id].attrs['end_time'] = datetime(2018, 1, 2, 12) - if ds_id == 'ds3': + for ds_id in ["ds1", "ds2", "ds3"]: + scenes[1][ds_id].attrs["start_time"] = datetime(2018, 1, 2) + scenes[1][ds_id].attrs["end_time"] = datetime(2018, 1, 2, 12) + if ds_id == "ds3": continue - scenes[0][ds_id].attrs['start_time'] = datetime(2018, 1, 1) - scenes[0][ds_id].attrs['end_time'] = datetime(2018, 1, 1, 12) + scenes[0][ds_id].attrs["start_time"] = datetime(2018, 1, 1) + scenes[0][ds_id].attrs["end_time"] = datetime(2018, 1, 1, 12) mscn = MultiScene(scenes) fn = str(tmp_path / - 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') + "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() - with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer: + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them - mscn.save_animation(fn, datasets=['ds1', 'ds2', 'ds3'], client=False) + mscn.save_animation(fn, datasets=["ds1", "ds2", "ds3"], client=False) # 2 saves for the first scene + 1 black frame # 3 for the second scene assert writer_mock.append_data.call_count == 3 + 3 filenames = [os.path.basename(args[0][0]) for args in get_writer.call_args_list] - assert filenames[0] == 'test_save_mp4_ds1_20180101_00_20180102_12.mp4' - assert filenames[1] == 'test_save_mp4_ds2_20180101_00_20180102_12.mp4' - assert filenames[2] == 'test_save_mp4_ds3_20180102_00_20180102_12.mp4' + assert filenames[0] == "test_save_mp4_ds1_20180101_00_20180102_12.mp4" + assert filenames[1] == "test_save_mp4_ds2_20180101_00_20180102_12.mp4" + assert filenames[2] == "test_save_mp4_ds3_20180102_00_20180102_12.mp4" # make sure that not specifying datasets still saves all of them fn = str(tmp_path / - 'test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4') + "test_save_mp4_{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}.mp4") writer_mock = mock.MagicMock() - with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer: + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock # force order of datasets by specifying them mscn.save_animation(fn, client=False) @@ -353,9 +353,9 @@ def test_save_mp4(smg, tmp_path): # test decorating and enhancing fn = str(tmp_path / 
- 'test-{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}-rich.mp4') + "test-{name}_{start_time:%Y%m%d_%H}_{end_time:%Y%m%d_%H}-rich.mp4") writer_mock = mock.MagicMock() - with mock.patch('satpy.multiscene._multiscene.imageio.get_writer') as get_writer: + with mock.patch("satpy.multiscene._multiscene.imageio.get_writer") as get_writer: get_writer.return_value = writer_mock mscn.save_animation( fn, client=False, diff --git a/satpy/tests/multiscene_tests/test_utils.py b/satpy/tests/multiscene_tests/test_utils.py index 409eb9cf86..310d68c215 100644 --- a/satpy/tests/multiscene_tests/test_utils.py +++ b/satpy/tests/multiscene_tests/test_utils.py @@ -30,27 +30,27 @@ DEFAULT_SHAPE = (5, 10) -local_id_keys_config = {'name': { - 'required': True, +local_id_keys_config = {"name": { + "required": True, }, - 'wavelength': { - 'type': WavelengthRange, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "resolution": None, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ] }, - 'polarization': None, - 'level': None, - 'modifiers': { - 'required': True, - 'default': ModifierTuple(), - 'type': ModifierTuple, + "polarization": None, + "level": None, + "modifiers": { + "required": True, + "default": ModifierTuple(), + "type": ModifierTuple, }, } @@ -63,14 +63,14 @@ def _fake_get_enhanced_image(img, enhance=None, overlay=None, decorate=None): def _create_test_area(proj_str=None, shape=DEFAULT_SHAPE, extents=None): """Create a test area definition.""" if proj_str is None: - proj_str = '+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. ' \ - '+lat_0=25 +lat_1=25 +units=m +no_defs' + proj_str = "+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. " \ + "+lat_0=25 +lat_1=25 +units=m +no_defs" extents = extents or (-1000., -1500., 1000., 1500.) 
return AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_str, shape[1], shape[0], @@ -82,9 +82,9 @@ def _create_test_int8_dataset(name, shape=DEFAULT_SHAPE, area=None, values=None, """Create a test DataArray object.""" return xr.DataArray( da.ones(shape, dtype=np.uint8, chunks=shape) * values, dims=dims, - attrs={'_FillValue': 255, - 'valid_range': [1, 15], - 'name': name, 'area': area, '_satpy_id_keys': local_id_keys_config}) + attrs={"_FillValue": 255, + "valid_range": [1, 15], + "name": name, "area": area, "_satpy_id_keys": local_id_keys_config}) def _create_test_dataset(name, shape=DEFAULT_SHAPE, area=None, values=None, dims=("y", "x")): @@ -92,22 +92,22 @@ def _create_test_dataset(name, shape=DEFAULT_SHAPE, area=None, values=None, dims if values: return xr.DataArray( da.ones(shape, dtype=np.float32, chunks=shape) * values, dims=dims, - attrs={'name': name, 'area': area, '_satpy_id_keys': local_id_keys_config}) + attrs={"name": name, "area": area, "_satpy_id_keys": local_id_keys_config}) return xr.DataArray( da.zeros(shape, dtype=np.float32, chunks=shape), dims=dims, - attrs={'name': name, 'area': area, '_satpy_id_keys': local_id_keys_config}) + attrs={"name": name, "area": area, "_satpy_id_keys": local_id_keys_config}) def _create_test_scenes(num_scenes=2, shape=DEFAULT_SHAPE, area=None): """Create some test scenes for various test cases.""" from satpy import Scene - ds1 = _create_test_dataset('ds1', shape=shape, area=area) - ds2 = _create_test_dataset('ds2', shape=shape, area=area) + ds1 = _create_test_dataset("ds1", shape=shape, area=area) + ds2 = _create_test_dataset("ds2", shape=shape, area=area) scenes = [] for _ in range(num_scenes): scn = Scene() - scn['ds1'] = ds1.copy() - scn['ds2'] = ds2.copy() + scn["ds1"] = ds1.copy() + scn["ds2"] = ds2.copy() scenes.append(scn) return scenes diff --git a/satpy/tests/reader_tests/_li_test_utils.py b/satpy/tests/reader_tests/_li_test_utils.py index 837e653cc3..d6a32253f5 100644 --- a/satpy/tests/reader_tests/_li_test_utils.py +++ b/satpy/tests/reader_tests/_li_test_utils.py @@ -23,16 +23,16 @@ # mapping of netcdf type code to numpy data type: TYPE_MAP = { - 'i1': np.int8, - 'i2': np.int16, - 'i4': np.int32, - 'i8': np.int64, - 'u1': np.uint8, - 'u2': np.uint16, - 'u4': np.uint32, - 'u8': np.uint64, - 'f4': np.float32, - 'f8': np.float64, + "i1": np.int8, + "i2": np.int16, + "i4": np.int32, + "i8": np.int64, + "u1": np.uint8, + "u2": np.uint16, + "u4": np.uint32, + "u8": np.uint64, + "f4": np.float32, + "f8": np.float64, } @@ -47,55 +47,55 @@ def rand_u16(num): return np.random.randint(low=0, high=np.iinfo(np.uint16).max - 1, size=num, dtype=np.uint16) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', 'data/'), - 'dimensions': { - 'unfiltered_events': nobs, - 'l1b_chunks': nchunks, - 'l1b_offsets': nchunks, - 'filters': nfilters, - 'scalar': 1, + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", "data/"), + "dimensions": { + "unfiltered_events": nobs, + "l1b_chunks": nchunks, + "l1b_offsets": nchunks, + "filters": nfilters, + "scalar": 1, }, - 'variables': {}, - 'sector_variables': { + "variables": {}, + "sector_variables": { "event_id": { "format": "u2", - "shape": ('unfiltered_events',), + "shape": ("unfiltered_events",), "fill_value": 65535, "long_name": "ID of LI L2 Event", "default_data": lambda: rand_u16(nobs) }, "group_id": { "format": "u2", - "shape": ('unfiltered_events',), + "shape": 
("unfiltered_events",), "fill_value": 65535, "long_name": "ID of associated LI L2 Group object", "default_data": lambda: rand_u16(nobs) }, "l1b_chunk_ids": { "format": "u4", - "shape": ('l1b_chunks',), + "shape": ("l1b_chunks",), "fill_value": 4294967295, "long_name": "Array of L1b event chunk IDs", "default_data": lambda: np.arange(nchunks) + 10000 }, "l1b_chunk_offsets": { "format": "u4", - "shape": ('l1b_offsets',), + "shape": ("l1b_offsets",), "fill_value": 4294967295, "long_name": "Array offset for L1b event chunk boundaries", "default_data": lambda: np.arange(nchunks) }, "l1b_window": { "format": "u4", - "shape": ('unfiltered_events',), + "shape": ("unfiltered_events",), "fill_value": 4294967295, "long_name": "window index of associated L1b event", "default_data": lambda: (np.arange(nobs) + 10000) }, "filter_values": { "format": "u1", - "shape": ('unfiltered_events', 'filters',), + "shape": ("unfiltered_events", "filters",), "fill_value": 255, "scale_factor": 0.004, "add_offset": 0.0, @@ -104,22 +104,22 @@ def rand_u16(num): }, "epoch_time": { "format": "f8", - "shape": ('scalar',), + "shape": ("scalar",), "fill_value": 9.96920996886869e36, "long_name": "Start time of integration frame", "default_data": lambda: 1.234, - 'precision': '1 millisecond', - 'time_standard': 'UTC', - 'standard_name': 'time', - 'units': 'seconds since 2000-01-01 00:00:00.0', + "precision": "1 millisecond", + "time_standard": "UTC", + "standard_name": "time", + "units": "seconds since 2000-01-01 00:00:00.0", }, "time_offset": { "format": "f4", - "shape": ('unfiltered_events',), + "shape": ("unfiltered_events",), "fill_value": 9.96921e36, "long_name": "Time offset from epoch time", "default_data": lambda: np.linspace(0.0, 1000.0, nobs), - 'units': 'seconds', + "units": "seconds", }, } } @@ -136,13 +136,13 @@ def l2_lef_schema(settings=None): nobs = settings.get("num_obs", 123) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', 'data/'), - 'dimensions': { - 'events': nobs, - 'scalar': 1, + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", "data/"), + "dimensions": { + "events": nobs, + "scalar": 1, }, - 'variables': { + "variables": { "l1b_geolocation_warning": { "format": "i1", "shape": (), # test explicitly the scalar case @@ -150,47 +150,47 @@ def l2_lef_schema(settings=None): }, "l1b_missing_warning": { "format": "i1", - "shape": ('scalar',), + "shape": ("scalar",), "default_data": lambda: 0 }, "l1b_radiometric_warning": { "format": "i1", - "shape": ('scalar',), + "shape": ("scalar",), "default_data": lambda: 0 }, }, - 'sector_variables': { + "sector_variables": { "event_id": { "format": "u4", - "shape": ('events',), + "shape": ("events",), "fill_value": 65535, "long_name": "ID of LI L2 Event", "default_data": lambda: np.arange(1, nobs + 1) }, "group_id": { "format": "u4", - "shape": ('events',), + "shape": ("events",), "fill_value": 65535, "long_name": "ID of associated LI L2 Group object", "default_data": lambda: np.arange(1, nobs + 1) }, "flash_id": { "format": "u4", - "shape": ('events',), + "shape": ("events",), "fill_value": 65535, "long_name": "ID of associated LI L2 Flash object", "default_data": lambda: np.arange(1, nobs + 1) }, "detector": { "format": "u4", - "shape": ('scalar',), + "shape": ("scalar",), "fill_value": 65535, "long_name": "ID of detector for this group", "default_data": lambda: 1 }, "latitude": { "format": "i2", - "shape": ('events',), + "shape": ("events",), "fill_value": -32767, 
"long_name": "Latitude of group", "units": "degrees_north", @@ -199,7 +199,7 @@ def l2_lef_schema(settings=None): }, "longitude": { "format": "i2", - "shape": ('events',), + "shape": ("events",), "fill_value": -32767, "long_name": "Longitude of group", "units": "degrees_east", @@ -208,7 +208,7 @@ def l2_lef_schema(settings=None): }, "radiance": { "format": "u2", - "shape": ('events',), + "shape": ("events",), "long_name": "Radiance of Flash", "standard_name": "radiance", "units": "mW.m-2.sr-1", @@ -216,34 +216,34 @@ def l2_lef_schema(settings=None): }, "event_filter_qa": { "format": "u1", - "shape": ('events',), + "shape": ("events",), "long_name": "L2 event pre-filtering quality assurance value", "default_data": lambda: np.random.randint(1, 2 ** 8 - 1, nobs) }, "epoch_time": { "format": "f8", - "shape": ('scalar',), + "shape": ("scalar",), "long_name": "Start time of integration frame", "units": "seconds since 2000-01-01 00:00:00.0", "default_data": lambda: start_ts }, "time_offset": { "format": "f4", - "shape": ('events',), + "shape": ("events",), "long_name": "Time offset from epoch time", "units": "seconds", "default_data": lambda: np.random.uniform(1, 2 ** 31 - 1, nobs) }, "detector_row": { "format": "u2", - "shape": ('events',), + "shape": ("events",), "long_name": "Detector row position of event pixel", "units": "1", "default_data": lambda: np.random.randint(1, 1000, nobs) }, "detector_column": { "format": "u2", - "shape": ('events',), + "shape": ("events",), "long_name": "Detector column position of event pixel", "units": "1", "default_data": lambda: np.random.randint(1, 1000, nobs) @@ -258,22 +258,22 @@ def l2_lgr_schema(settings=None): ngrps = settings.get("num_groups", 120) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', ''), - 'dimensions': { - 'groups': ngrps, + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", ""), + "dimensions": { + "groups": ngrps, }, - 'variables': { + "variables": { "latitude": { "format": "f4", - "shape": ('groups',), + "shape": ("groups",), "long_name": "Latitude of group", "units": "degrees_north", "default_data": lambda: np.linspace(-90, 90, ngrps) }, "longitude": { "format": "f4", - "shape": ('groups',), + "shape": ("groups",), "long_name": "Longitude of group", "units": "degrees_east", "default_data": lambda: np.linspace(-180, 80, ngrps) @@ -292,15 +292,15 @@ def l2_lfl_schema(settings=None): etime = (datetime(2019, 1, 2) - epoch).total_seconds() return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', ''), - 'dimensions': { - 'flashes': nobs, + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", ""), + "dimensions": { + "flashes": nobs, }, - 'variables': { + "variables": { "latitude": { "format": "i2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Latitude of Flash", "standard_name": "latitude", "units": "degrees_north", @@ -312,7 +312,7 @@ def l2_lfl_schema(settings=None): }, "longitude": { "format": "i2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Longitude of Flash", "standard_name": "longitude", "units": "degrees_east", @@ -324,7 +324,7 @@ def l2_lfl_schema(settings=None): }, "radiance": { "format": "u2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Radiance of Flash", "standard_name": "radiance", "units": "mW.m-2.sr-1", @@ -332,7 +332,7 @@ def l2_lfl_schema(settings=None): }, "flash_duration": { 
"format": "u2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Flash duration", "standard_name": "flash_duration", "units": "ms", @@ -340,56 +340,56 @@ def l2_lfl_schema(settings=None): }, "flash_filter_confidence": { "format": "i1", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "L2 filtered flash confidence", "standard_name": "flash_filter_confidence", "default_data": lambda: np.clip(np.round(np.random.normal(20, 10, nobs)), 1, 2 ** 7 - 1) }, "flash_footprint": { "format": "u2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Flash footprint size", - "standard_name": 'flash_footprint', + "standard_name": "flash_footprint", "units": "L1 grid pixels", "default_data": lambda: np.maximum(1, np.round(np.random.normal(5, 3, nobs))) }, "flash_id": { "format": "u4", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Flash footprint size", - "standard_name": 'flash_id', + "standard_name": "flash_id", "default_data": lambda: np.arange(1, nobs + 1) }, "flash_time": { "format": "f8", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Nominal flash time", "units": "seconds since 2000-01-01 00:00:00.0", - "standard_name": 'time', + "standard_name": "time", "precision": "1 millisecond", "default_data": lambda: np.random.uniform(stime, etime, nobs) }, "l1b_geolocation_warning": { "format": "u8", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "L1b geolocation warning", "default_data": lambda: -127 }, "l1b_radiometric_warning": { "format": "u8", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "L1b radiometric warning", "default_data": lambda: -127 }, "number_of_events": { "format": "u2", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Number of events in each flash", "default_data": lambda: 1 }, "number_of_groups": { "format": "u4", - "shape": ('flashes',), + "shape": ("flashes",), "long_name": "Number of flashes in each flash", "default_data": lambda: 1 }, @@ -403,45 +403,45 @@ def l2_af_schema(settings=None): nobs = settings.get("num_obs", 1234) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', ''), - 'dimensions': accumulation_dimensions(1, nobs), - 'variables': { + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", ""), + "dimensions": accumulation_dimensions(1, nobs), + "variables": { "accumulation_offsets": { "format": "u4", - "shape": ('accumulations',), + "shape": ("accumulations",), "default_data": lambda: 0 }, "accumulation_start_times": { "format": "f8", - "shape": ('accumulations',), + "shape": ("accumulations",), "default_data": lambda: 4.25055600161e8 }, "l1b_geolocation_warning": { "format": "i1", - "shape": ('accumulations',), + "shape": ("accumulations",), "long_name": "L1b geolocation warning", "default_data": lambda: -127 }, "l1b_radiometric_warning": { "format": "i1", - "shape": ('accumulations',), + "shape": ("accumulations",), "long_name": "L1b radiometric warning", "default_data": lambda: -127 }, "average_flash_qa": { "format": "i1", - "shape": ('accumulations',), + "shape": ("accumulations",), "default_data": lambda: 23 }, "flash_accumulation": { "format": "u2", - "shape": ('pixels',), + "shape": ("pixels",), "default_data": lambda: np.clip(np.round(np.random.normal(1, 2, nobs)), 1, 2 ** 16 - 1) }, "mtg_geos_projection": mtg_geos_projection(), - "x": fci_grid_definition('X', nobs), - "y": fci_grid_definition('Y', nobs), + "x": fci_grid_definition("X", nobs), + "y": 
fci_grid_definition("Y", nobs), } } @@ -453,27 +453,27 @@ def l2_afa_schema(settings=None): nacc = settings.get("num_accumulations", 20) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', ''), - 'dimensions': accumulation_dimensions(nacc, npix), - 'variables': { + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", ""), + "dimensions": accumulation_dimensions(nacc, npix), + "variables": { "accumulation_start_times": { "format": "f4", - "shape": ('accumulations',), + "shape": ("accumulations",), "long_name": "Accumulation start time", "units": "seconds since 2000-01-01 00:00:00.0", "default_data": lambda: np.linspace(0.0, 1.0, nacc) }, "accumulated_flash_area": { "format": "u4", - "shape": ('pixels',), + "shape": ("pixels",), "fill_value": 4294967295, "long_name": "Number of contributing unique flashes to each pixel", "default_data": lambda: np.mod(np.arange(npix), 10) + 1 }, "mtg_geos_projection": mtg_geos_projection(), - "x": fci_grid_definition('X', npix), - "y": fci_grid_definition('Y', npix), + "x": fci_grid_definition("X", npix), + "y": fci_grid_definition("Y", npix), } } @@ -485,13 +485,13 @@ def l2_afr_schema(settings=None): nacc = settings.get("num_accumulations", 20) return { - 'providers': settings.get('providers', {}), - 'variable_path': settings.get('variable_path', ''), - 'dimensions': accumulation_dimensions(nacc, nobs), - 'variables': { + "providers": settings.get("providers", {}), + "variable_path": settings.get("variable_path", ""), + "dimensions": accumulation_dimensions(nacc, nobs), + "variables": { "flash_radiance": { "format": "f4", - "shape": ('pixels',), + "shape": ("pixels",), "long_name": "Area averaged flash radiance accumulation", "grid_mapping": "mtg_geos_projection", "coordinate": "sparse: x y", @@ -499,14 +499,14 @@ def l2_afr_schema(settings=None): }, "accumulation_start_times": { "format": "f4", - "shape": ('accumulations',), + "shape": ("accumulations",), "long_name": "Accumulation start time", "units": "seconds since 2000-01-01 00:00:00.0", "default_data": lambda: 0 }, "mtg_geos_projection": mtg_geos_projection(), - "x": fci_grid_definition('X', nobs), - "y": fci_grid_definition('Y', nobs), + "x": fci_grid_definition("X", nobs), + "y": fci_grid_definition("Y", nobs), } } @@ -514,29 +514,29 @@ def l2_afr_schema(settings=None): def accumulation_dimensions(nacc, nobs): """Set dimensions for the accumulated products.""" return { - 'accumulations': nacc, - 'pixels': nobs, + "accumulations": nacc, + "pixels": nobs, } def fci_grid_definition(axis, nobs): """FCI grid definition on X or Y axis.""" - if axis == 'X': - long_name = 'azimuth angle encoded as column' - standard_name = 'projection_x_coordinate' + if axis == "X": + long_name = "azimuth angle encoded as column" + standard_name = "projection_x_coordinate" else: - long_name = 'zenith angle encoded as row' - standard_name = 'projection_y_coordinate' + long_name = "zenith angle encoded as row" + standard_name = "projection_y_coordinate" return { "format": "i2", - "shape": ('pixels',), + "shape": ("pixels",), "add_offset": -0.155619516, "axis": axis, "long_name": long_name, "scale_factor": 5.58878e-5, "standard_name": standard_name, - "units": 'radian', + "units": "radian", "valid_range": np.asarray([1, 5568]), "default_data": lambda: np.clip(np.round(np.random.normal(2000, 500, nobs)), 1, 2 ** 16 - 1) } @@ -546,49 +546,49 @@ def mtg_geos_projection(): """MTG geos projection definition.""" return { "format": "i4", - 
"shape": ('accumulations',), - "grid_mapping_name": 'geostationary', + "shape": ("accumulations",), + "grid_mapping_name": "geostationary", "inverse_flattening": 298.2572221, "latitude_of_projection_origin": 0, "longitude_of_projection_origin": 0, "perspective_point_height": 42164000, "semi_major_axis": 6378169, "semi_minor_axis": 6356583.8, - "sweep_angle_axis": 'y', - "long_name": 'MTG geostationary projection', + "sweep_angle_axis": "y", + "long_name": "MTG geostationary projection", "default_data": lambda: -2147483647 } products_dict = { - '2-LE': {'ftype': 'li_l2_le_nc', 'schema': l2_le_schema}, - '2-LEF': {'ftype': 'li_l2_lef_nc', 'schema': l2_lef_schema}, - '2-LGR': {'ftype': 'li_l2_lgr_nc', 'schema': l2_lgr_schema}, - '2-LFL': {'ftype': 'li_l2_lfl_nc', 'schema': l2_lfl_schema}, - '2-AF': {'ftype': 'li_l2_af_nc', 'schema': l2_af_schema}, - '2-AFA': {'ftype': 'li_l2_afa_nc', 'schema': l2_afa_schema}, - '2-AFR': {'ftype': 'li_l2_afr_nc', 'schema': l2_afr_schema}, + "2-LE": {"ftype": "li_l2_le_nc", "schema": l2_le_schema}, + "2-LEF": {"ftype": "li_l2_lef_nc", "schema": l2_lef_schema}, + "2-LGR": {"ftype": "li_l2_lgr_nc", "schema": l2_lgr_schema}, + "2-LFL": {"ftype": "li_l2_lfl_nc", "schema": l2_lfl_schema}, + "2-AF": {"ftype": "li_l2_af_nc", "schema": l2_af_schema}, + "2-AFA": {"ftype": "li_l2_afa_nc", "schema": l2_afa_schema}, + "2-AFR": {"ftype": "li_l2_afr_nc", "schema": l2_afr_schema}, } def get_product_schema(pname, settings=None): """Retrieve an LI product schema given its name.""" - return products_dict[pname]['schema'](settings) + return products_dict[pname]["schema"](settings) def extract_filetype_info(filetype_infos, filetype): """Extract Satpy-conform filetype_info from filetype_infos fixture.""" ftype_info = filetype_infos[filetype] - ftype_info['file_type'] = filetype + ftype_info["file_type"] = filetype return ftype_info def set_variable_path(var_path, desc, sname): """Replace variable default path if applicable and ensure trailing separator.""" - vpath = desc.get('path', var_path) + vpath = desc.get("path", var_path) # Ensure we have a trailing separator: - if vpath != "" and vpath[-1] != '/': - vpath += '/' + if vpath != "" and vpath[-1] != "/": + vpath += "/" if sname != "": vpath += sname + "/" return vpath @@ -606,9 +606,9 @@ def populate_dummy_data(data, names, details): # Otherwise we write the default data: if data.shape == (): # scalar case - data = desc['default_data']() + data = desc["default_data"]() else: - data[:] = desc['default_data']() + data[:] = desc["default_data"]() def add_attributes(attribs, ignored_attrs, desc): @@ -634,22 +634,22 @@ def get_variable_writer(self, dset, settings): var_path = settings.get("variable_path", "") # Also keep track of the potential providers: - providers = settings.get('providers', {}) + providers = settings.get("providers", {}) # list of ignored attribute names: ignored_attrs = ["path", "format", "shape", "default_data", "fill_value"] # dictionary of dimensions: - dims = settings.get('dimensions', {}) + dims = settings.get("dimensions", {}) def write_variable(vname, desc, sname=""): """Write a variable in our dataset.""" # get numeric shape: - shape_str = desc['shape'] + shape_str = desc["shape"] shape = tuple([dims[dname] for dname in shape_str]) # Get the desired data type: - dtype = TYPE_MAP[desc['format']] + dtype = TYPE_MAP[desc["format"]] # Prepare a numpy array with the appropriate shape and type: data = np.zeros(shape, dtype=dtype) @@ -665,8 +665,8 @@ def write_variable(vname, desc, sname=""): 
add_attributes(attribs, ignored_attrs, desc) # Rename the fill value attribute: - if 'fill_value' in desc: - attribs['_FillValue'] = desc['fill_value'] + if "fill_value" in desc: + attribs["_FillValue"] = desc["fill_value"] names = [vname, sname] details = [desc, providers, settings] @@ -692,7 +692,7 @@ def get_test_content(self, filename, filename_info, filetype_info): # Note: params *IS* callable below: params = params(filename, filename_info, filetype_info) # pylint: disable=not-callable - settings = get_product_schema(filetype_info['file_desc']['product_type'], params) + settings = get_product_schema(filetype_info["file_desc"]["product_type"], params) # Resulting dataset: dset = {} @@ -713,16 +713,16 @@ def get_test_content(self, filename, filename_info, filetype_info): def write_variables(self, settings, write_variable): """Write raw (i.e. not in sectors) variables.""" - if 'variables' in settings: - variables = settings.get('variables') + if "variables" in settings: + variables = settings.get("variables") for vname, desc in variables.items(): write_variable(vname, desc) def write_sector_variables(self, settings, write_variable): """Write the sector variables.""" - if 'sector_variables' in settings: - sector_vars = settings.get('sector_variables') - sectors = settings.get('sectors', ['north', 'east', 'south', 'west']) + if "sector_variables" in settings: + sector_vars = settings.get("sector_variables") + sectors = settings.get("sectors", ["north", "east", "south", "west"]) for sname in sectors: for vname, desc in sector_vars.items(): diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py index f4908c0a2b..4638bcfca3 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py @@ -25,7 +25,7 @@ @pytest.fixture(params=[False, True], autouse=True) -def disable_jit(request, monkeypatch): +def _disable_jit(request, monkeypatch): """Run tests with jit enabled and disabled. Reason: Coverage report is only accurate with jit disabled. 
@@ -58,7 +58,7 @@ class TestFileHandler: """Test VISSR file handler.""" @pytest.fixture(autouse=True) - def patch_number_of_pixels_per_scanline(self, monkeypatch): + def _patch_number_of_pixels_per_scanline(self, monkeypatch): """Patch data types so that each scanline has two pixels.""" num_pixels = 2 IMAGE_DATA_BLOCK_IR = np.dtype( @@ -116,12 +116,12 @@ def with_compression(self, request): """Enable compression.""" return request.param - @pytest.fixture + @pytest.fixture() def open_function(self, with_compression): """Get open function for writing test files.""" return gzip.open if with_compression else open - @pytest.fixture + @pytest.fixture() def vissr_file(self, dataset_id, file_contents, open_function, tmp_path): """Get test VISSR file.""" filename = tmp_path / "vissr_file" @@ -130,7 +130,7 @@ def vissr_file(self, dataset_id, file_contents, open_function, tmp_path): writer.write(filename, file_contents) return filename - @pytest.fixture + @pytest.fixture() def file_contents(self, control_block, image_parameters, image_data): """Get VISSR file contents.""" return { @@ -139,7 +139,7 @@ def file_contents(self, control_block, image_parameters, image_data): "image_data": image_data, } - @pytest.fixture + @pytest.fixture() def control_block(self, dataset_id): """Get VISSR control block.""" block_size = {"IR1": 16, "VIS": 4} @@ -148,7 +148,7 @@ def control_block(self, dataset_id): ctrl_block["available_block_size_of_image_data"] = 2 return ctrl_block - @pytest.fixture + @pytest.fixture() def image_parameters(self, mode_block, cal_params, nav_params): """Get VISSR image parameters.""" image_params = {"mode": mode_block} @@ -156,7 +156,7 @@ def image_parameters(self, mode_block, cal_params, nav_params): image_params.update(nav_params) return image_params - @pytest.fixture + @pytest.fixture() def nav_params( self, coordinate_conversion, @@ -170,7 +170,7 @@ def nav_params( nav_params.update(coordinate_conversion) return nav_params - @pytest.fixture + @pytest.fixture() def cal_params( self, vis_calibration, @@ -186,7 +186,7 @@ def cal_params( "wv_calibration": wv_calibration, } - @pytest.fixture + @pytest.fixture() def mode_block(self): """Get VISSR mode block.""" mode = np.zeros(1, dtype=fmt.MODE_BLOCK) @@ -201,7 +201,7 @@ def mode_block(self): mode["vis_frame_parameters"]["number_of_pixels"] = 2 return mode - @pytest.fixture + @pytest.fixture() def coordinate_conversion(self, coord_conv, simple_coord_conv_table): """Get all coordinate conversion parameters.""" return { @@ -209,7 +209,7 @@ def coordinate_conversion(self, coord_conv, simple_coord_conv_table): "simple_coordinate_conversion_table": simple_coord_conv_table } - @pytest.fixture + @pytest.fixture() def coord_conv(self): """Get parameters for coordinate conversions. 
@@ -227,7 +227,7 @@ def coord_conv(self): cpix["IR1"] = 0.5 # instead of 1672.5 cpix["VIS"] = 0.5 # instead of 6688.5 - conv['scheduled_observation_time'] = 50130.979089568464 + conv["scheduled_observation_time"] = 50130.979089568464 nsensors = conv["number_of_sensor_elements"] nsensors["IR1"] = 1 @@ -255,14 +255,14 @@ def coord_conv(self): conv["orbital_parameters"]["latitude_of_ssp"] = 1.0 return conv - @pytest.fixture + @pytest.fixture() def attitude_prediction(self): """Get attitude prediction.""" att_pred = np.zeros(1, dtype=fmt.ATTITUDE_PREDICTION) att_pred["data"] = real_world.ATTITUDE_PREDICTION return {"attitude_prediction": att_pred} - @pytest.fixture + @pytest.fixture() def orbit_prediction(self, orbit_prediction_1, orbit_prediction_2): """Get predictions of orbital parameters.""" return { @@ -270,21 +270,21 @@ def orbit_prediction(self, orbit_prediction_1, orbit_prediction_2): "orbit_prediction_2": orbit_prediction_2 } - @pytest.fixture + @pytest.fixture() def orbit_prediction_1(self): """Get first block of orbit prediction data.""" orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION) orb_pred["data"] = real_world.ORBIT_PREDICTION_1 return orb_pred - @pytest.fixture + @pytest.fixture() def orbit_prediction_2(self): """Get second block of orbit prediction data.""" orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION) orb_pred["data"] = real_world.ORBIT_PREDICTION_2 return orb_pred - @pytest.fixture + @pytest.fixture() def vis_calibration(self): """Get VIS calibration block.""" vis_cal = np.zeros(1, dtype=fmt.VIS_CALIBRATION) @@ -292,7 +292,7 @@ def vis_calibration(self): table[0, 0:4] = np.array([0, 0.25, 0.5, 1]) return vis_cal - @pytest.fixture + @pytest.fixture() def ir1_calibration(self): """Get IR1 calibration block.""" cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) @@ -300,32 +300,32 @@ def ir1_calibration(self): table[0, 0:4] = np.array([0, 100, 200, 300]) return cal - @pytest.fixture + @pytest.fixture() def ir2_calibration(self): """Get IR2 calibration block.""" cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) return cal - @pytest.fixture + @pytest.fixture() def wv_calibration(self): """Get WV calibration block.""" cal = np.zeros(1, dtype=fmt.IR_CALIBRATION) return cal - @pytest.fixture + @pytest.fixture() def simple_coord_conv_table(self): """Get simple coordinate conversion table.""" table = np.zeros(1, dtype=fmt.SIMPLE_COORDINATE_CONVERSION_TABLE) table["satellite_height"] = 123457.0 return table - @pytest.fixture + @pytest.fixture() def image_data(self, dataset_id, image_data_ir1, image_data_vis): """Get VISSR image data.""" data = {"IR1": image_data_ir1, "VIS": image_data_vis} return data[dataset_id["name"]] - @pytest.fixture + @pytest.fixture() def image_data_ir1(self): """Get IR1 image data.""" image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_IR) @@ -336,7 +336,7 @@ def image_data_ir1(self): image_data["image_data"] = [[0, 1], [2, 3]] return image_data - @pytest.fixture + @pytest.fixture() def image_data_vis(self): """Get VIS image data.""" image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_VIS) @@ -347,7 +347,7 @@ def image_data_vis(self): image_data["image_data"] = [[0, 1], [2, 3]] return image_data - @pytest.fixture + @pytest.fixture() def vissr_file_like(self, vissr_file, with_compression): """Get file-like object for VISSR test file.""" if with_compression: @@ -355,14 +355,14 @@ def vissr_file_like(self, vissr_file, with_compression): return FSFile(open_file) return vissr_file - @pytest.fixture + @pytest.fixture() def file_handler(self, vissr_file_like, mask_space): """Get file 
handler to be tested.""" return vissr.GMS5VISSRFileHandler( vissr_file_like, {}, {}, mask_space=mask_space ) - @pytest.fixture + @pytest.fixture() def vis_refl_exp(self, mask_space, lons_lats_exp): """Get expected VIS reflectance.""" lons, lats = lons_lats_exp @@ -384,7 +384,7 @@ def vis_refl_exp(self, mask_space, lons_lats_exp): }, ) - @pytest.fixture + @pytest.fixture() def ir1_counts_exp(self, lons_lats_exp): """Get expected IR1 counts.""" lons, lats = lons_lats_exp @@ -402,7 +402,7 @@ def ir1_counts_exp(self, lons_lats_exp): }, ) - @pytest.fixture + @pytest.fixture() def ir1_bt_exp(self, lons_lats_exp): """Get expected IR1 brightness temperature.""" lons, lats = lons_lats_exp @@ -420,7 +420,7 @@ def ir1_bt_exp(self, lons_lats_exp): }, ) - @pytest.fixture + @pytest.fixture() def lons_lats_exp(self, dataset_id): """Get expected lon/lat coordinates. @@ -456,7 +456,7 @@ def lons_lats_exp(self, dataset_id): lats = xr.DataArray(exp["lats"], dims=("y", "x")) return lons, lats - @pytest.fixture + @pytest.fixture() def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): """Get expected dataset.""" ir1_counts_id = make_dataid(name="IR1", calibration="counts", resolution=5000) @@ -473,7 +473,7 @@ def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp): } return expectations[dataset_id] - @pytest.fixture + @pytest.fixture() def area_def_exp(self, dataset_id): """Get expected area definition.""" if dataset_id["name"] == "IR1": @@ -507,7 +507,7 @@ def area_def_exp(self, dataset_id): height=size, ) - @pytest.fixture + @pytest.fixture() def attrs_exp(self, area_def_exp): """Get expected dataset attributes.""" return { @@ -546,7 +546,7 @@ def test_time_attributes(self, file_handler, attrs_exp): class TestCorruptFile: """Test reading corrupt files.""" - @pytest.fixture + @pytest.fixture() def file_contents(self): """Get corrupt file contents (all zero).""" control_block = np.zeros(1, dtype=fmt.CONTROL_BLOCK) @@ -557,7 +557,7 @@ def file_contents(self): "image_data": image_data, } - @pytest.fixture + @pytest.fixture() def corrupt_file(self, file_contents, tmp_path): """Write corrupt VISSR file to disk.""" filename = tmp_path / "my_vissr_file" diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py index 144139a07a..2a1a1cade9 100644 --- a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py +++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py @@ -19,9 +19,9 @@ IR_NAVIGATION_REFERENCE = [ { "pixel": nav.Pixel(line=686, pixel=1680), - 'lon': 139.990380, - 'lat': 35.047056, - 'nav_params': nav.PixelNavigationParameters( + "lon": 139.990380, + "lat": 35.047056, + "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.997397917902958, angle_between_sat_spin_and_z_axis=3.149118633034304, @@ -67,9 +67,9 @@ }, { "pixel": nav.Pixel(line=2089, pixel=1793), - 'lon': 144.996967, - 'lat': -34.959853, - 'nav_params': nav.PixelNavigationParameters( + "lon": 144.996967, + "lat": -34.959853, + "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.935707944355762, angle_between_sat_spin_and_z_axis=3.149118633034304, @@ -119,9 +119,9 @@ VIS_NAVIGATION_REFERENCE = [ { "pixel": nav.Pixel(line=2744, pixel=6720), - 'lon': 139.975527, - 'lat': 35.078028, - 'nav_params': nav.PixelNavigationParameters( + "lon": 139.975527, + "lat": 35.078028, + "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( 
angle_between_earth_and_sun=3.997397918405798, angle_between_sat_spin_and_z_axis=3.149118633034304, @@ -167,9 +167,9 @@ }, { "pixel": nav.Pixel(line=8356, pixel=7172), - 'lon': 144.980104, - 'lat': -34.929123, - 'nav_params': nav.PixelNavigationParameters( + "lon": 144.980104, + "lat": -34.929123, + "nav_params": nav.PixelNavigationParameters( attitude=nav.Attitude( angle_between_earth_and_sun=3.935707944858620, angle_between_sat_spin_and_z_axis=3.149118633034304, @@ -219,7 +219,7 @@ @pytest.fixture(params=[False, True], autouse=True) -def disable_jit(request, monkeypatch): +def _disable_jit(request, monkeypatch): """Run tests with jit enabled and disabled. Reason: Coverage report is only accurate with jit disabled. @@ -234,7 +234,7 @@ class TestSinglePixelNavigation: """Test navigation of a single pixel.""" @pytest.mark.parametrize( - "point,nav_params,expected", + ("point", "nav_params", "expected"), [ (ref["pixel"], ref["nav_params"], (ref["lon"], ref["lat"])) for ref in NAVIGATION_REFERENCE @@ -297,7 +297,7 @@ def test_intersect_view_vector_with_earth(self): np.testing.assert_allclose(point, exp) @pytest.mark.parametrize( - "point_earth_fixed,point_geodetic_exp", + ("point_earth_fixed", "point_geodetic_exp"), [ ([0, 0, 1], [0, 90]), ([0, 0, -1], [0, -90]), @@ -328,7 +328,7 @@ def test_normalize_vector(self): class TestImageNavigation: """Test navigation of an entire image.""" - @pytest.fixture + @pytest.fixture() def expected(self): """Get expected coordinates.""" exp = { @@ -356,7 +356,7 @@ class TestPredictionInterpolation: """Test interpolation of orbit and attitude predictions.""" @pytest.mark.parametrize( - "obs_time,expected", [(-1, np.nan), (1.5, 2.5), (5, np.nan)] + ("obs_time", "expected"), [(-1, np.nan), (1.5, 2.5), (5, np.nan)] ) def test_interpolate_continuous(self, obs_time, expected): """Test interpolation of continuous variables.""" @@ -366,7 +366,7 @@ def test_interpolate_continuous(self, obs_time, expected): np.testing.assert_allclose(res, expected) @pytest.mark.parametrize( - "obs_time,expected", + ("obs_time", "expected"), [ (-1, np.nan), (1.5, 0.75 * np.pi), @@ -385,7 +385,7 @@ def test_interpolate_angles(self, obs_time, expected): np.testing.assert_allclose(res, expected) @pytest.mark.parametrize( - "obs_time,expected", + ("obs_time", "expected"), [ (-1, np.nan * np.ones((2, 2))), (1.5, [[1, 0], [0, 2]]), @@ -417,12 +417,12 @@ def test_interpolate_attitude_prediction( attitude = nav.interpolate_attitude_prediction(attitude_prediction, obs_time) _assert_namedtuple_close(attitude, attitude_expected) - @pytest.fixture + @pytest.fixture() def obs_time(self): """Get observation time.""" return 2.5 - @pytest.fixture + @pytest.fixture() def orbit_expected(self): """Get expected orbit.""" return nav.Orbit( @@ -439,7 +439,7 @@ def orbit_expected(self): nutation_precession=1.6 * np.identity(3), ) - @pytest.fixture + @pytest.fixture() def attitude_expected(self): """Get expected attitude.""" return nav.Attitude( @@ -449,13 +449,13 @@ def attitude_expected(self): ) -@pytest.fixture +@pytest.fixture() def sampling_angle(): """Get sampling angle.""" return 0.000095719995443 -@pytest.fixture +@pytest.fixture() def scan_params(sampling_angle): """Get scanning parameters.""" return nav.ScanningParameters( @@ -466,7 +466,7 @@ def scan_params(sampling_angle): ) -@pytest.fixture +@pytest.fixture() def attitude_prediction(): """Get attitude prediction.""" return nav.AttitudePrediction( @@ -479,7 +479,7 @@ def attitude_prediction(): ) -@pytest.fixture +@pytest.fixture() def 
orbit_prediction(): """Get orbit prediction.""" return nav.OrbitPrediction( @@ -505,7 +505,7 @@ def orbit_prediction(): ) -@pytest.fixture +@pytest.fixture() def proj_params(sampling_angle): """Get projection parameters.""" return nav.ProjectionParameters( @@ -525,19 +525,19 @@ def proj_params(sampling_angle): ) -@pytest.fixture +@pytest.fixture() def static_nav_params(proj_params, scan_params): """Get static navigation parameters.""" return nav.StaticNavigationParameters(proj_params, scan_params) -@pytest.fixture +@pytest.fixture() def predicted_nav_params(attitude_prediction, orbit_prediction): """Get predicted navigation parameters.""" return nav.PredictedNavigationParameters(attitude_prediction, orbit_prediction) -@pytest.fixture +@pytest.fixture() def navigation_params(static_nav_params, predicted_nav_params): """Get image navigation parameters.""" return nav.ImageNavigationParameters(static_nav_params, predicted_nav_params) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index 49331f5421..66221c613e 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -32,12 +32,12 @@ # Level 1 Fixtures AVAILABLE_1KM_VIS_PRODUCT_NAMES = [str(x) for x in range(8, 13)] -AVAILABLE_1KM_VIS_PRODUCT_NAMES += ['13lo', '13hi', '14lo', '14hi'] +AVAILABLE_1KM_VIS_PRODUCT_NAMES += ["13lo", "13hi", "14lo", "14hi"] AVAILABLE_1KM_VIS_PRODUCT_NAMES += [str(x) for x in range(15, 20)] AVAILABLE_1KM_IR_PRODUCT_NAMES = [str(x) for x in range(20, 37)] AVAILABLE_1KM_PRODUCT_NAMES = AVAILABLE_1KM_VIS_PRODUCT_NAMES + AVAILABLE_1KM_IR_PRODUCT_NAMES AVAILABLE_HKM_PRODUCT_NAMES = [str(x) for x in range(3, 8)] -AVAILABLE_QKM_PRODUCT_NAMES = ['1', '2'] +AVAILABLE_QKM_PRODUCT_NAMES = ["1", "2"] SCAN_LEN_5KM = 6 # 3 scans of 5km data SCAN_WIDTH_5KM = 270 SCALE_FACTOR = 0.5 @@ -101,14 +101,14 @@ def _generate_visible_uncertainty_data(shape: tuple) -> np.ndarray: def _get_lonlat_variable_info(resolution: int) -> dict: lon_5km, lat_5km = _generate_lonlat_data(resolution) return { - 'Latitude': {'data': lat_5km, - 'type': SDC.FLOAT32, - 'fill_value': -999, - 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}}, - 'Longitude': {'data': lon_5km, - 'type': SDC.FLOAT32, - 'fill_value': -999, - 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}}, + "Latitude": {"data": lat_5km, + "type": SDC.FLOAT32, + "fill_value": -999, + "attrs": {"dim_labels": ["Cell_Along_Swath_5km:mod35", "Cell_Across_Swath_5km:mod35"]}}, + "Longitude": {"data": lon_5km, + "type": SDC.FLOAT32, + "fill_value": -999, + "attrs": {"dim_labels": ["Cell_Along_Swath_5km:mod35", "Cell_Across_Swath_5km:mod35"]}}, } @@ -116,19 +116,19 @@ def _get_angles_variable_info(resolution: int) -> dict: angle_data = _generate_angle_data(resolution) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 angle_info = { - 'data': angle_data, - 'type': SDC.INT16, - 'fill_value': -32767, - 'attrs': { - 'dim_labels': [ - f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B', - '1KM_geo_dim:MODIS_SWATH_Type_L1B'], - 'scale_factor': 0.01, - 'add_offset': -0.01, + "data": angle_data, + "type": SDC.INT16, + "fill_value": -32767, + "attrs": { + "dim_labels": [ + f"{dim_factor}*nscans:MODIS_SWATH_Type_L1B", + "1KM_geo_dim:MODIS_SWATH_Type_L1B"], + "scale_factor": 0.01, + "add_offset": -0.01, }, } angles_info = {} - for var_name in ('SensorAzimuth', 'SensorZenith', 'SolarAzimuth', 
'SolarZenith'): + for var_name in ("SensorAzimuth", "SensorZenith", "SolarAzimuth", "SolarZenith"): angles_info[var_name] = angle_info return angles_info @@ -139,30 +139,30 @@ def _get_visible_variable_info(var_name: str, resolution: int, bands: list[str]) uncertainty = _generate_visible_uncertainty_data(data.shape) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 band_dim_name = f"Band_{resolution}_{num_bands}_RefSB:MODIS_SWATH_Type_L1B" - row_dim_name = f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B' - col_dim_name = 'Max_EV_frames:MODIS_SWATH_Type_L1B' + row_dim_name = f"{dim_factor}*nscans:MODIS_SWATH_Type_L1B" + col_dim_name = "Max_EV_frames:MODIS_SWATH_Type_L1B" return { var_name: { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 0, - 'attrs': { + "data": data, + "type": SDC.UINT16, + "fill_value": 0, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [band_dim_name, + "dim_labels": [band_dim_name, row_dim_name, col_dim_name], - 'valid_range': (0, 32767), - 'reflectance_scales': (2.0,) * num_bands, - 'reflectance_offsets': (-0.5,) * num_bands, - 'band_names': ",".join(bands), + "valid_range": (0, 32767), + "reflectance_scales": (2.0,) * num_bands, + "reflectance_offsets": (-0.5,) * num_bands, + "band_names": ",".join(bands), }, }, - var_name + '_Uncert_Indexes': { - 'data': uncertainty, - 'type': SDC.UINT8, - 'fill_value': 255, - 'attrs': { - 'dim_labels': [band_dim_name, + var_name + "_Uncert_Indexes": { + "data": uncertainty, + "type": SDC.UINT8, + "fill_value": 255, + "attrs": { + "dim_labels": [band_dim_name, row_dim_name, col_dim_name], }, @@ -175,27 +175,27 @@ def _get_emissive_variable_info(var_name: str, resolution: int, bands: list[str] data = _generate_visible_data(resolution, len(bands)) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 band_dim_name = f"Band_{resolution}_{num_bands}_Emissive:MODIS_SWATH_Type_L1B" - row_dim_name = f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B' - col_dim_name = 'Max_EV_frames:MODIS_SWATH_Type_L1B' + row_dim_name = f"{dim_factor}*nscans:MODIS_SWATH_Type_L1B" + col_dim_name = "Max_EV_frames:MODIS_SWATH_Type_L1B" return { var_name: { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 0, - 'attrs': { - 'dim_labels': [band_dim_name, + "data": data, + "type": SDC.UINT16, + "fill_value": 0, + "attrs": { + "dim_labels": [band_dim_name, row_dim_name, col_dim_name], - 'valid_range': (0, 32767), - 'band_names': ",".join(bands), + "valid_range": (0, 32767), + "band_names": ",".join(bands), }, }, - var_name + '_Uncert_Indexes': { - 'data': np.zeros(data.shape, dtype=np.uint8), - 'type': SDC.UINT8, - 'fill_value': 255, - 'attrs': { - 'dim_labels': [band_dim_name, + var_name + "_Uncert_Indexes": { + "data": np.zeros(data.shape, dtype=np.uint8), + "type": SDC.UINT8, + "fill_value": 255, + "attrs": { + "dim_labels": [band_dim_name, row_dim_name, col_dim_name], }, @@ -217,13 +217,13 @@ def _get_l1b_geo_variable_info(filename: str, def generate_nasa_l1b_filename(prefix): """Generate a filename that follows NASA MODIS L1b convention.""" now = datetime.now() - return f'{prefix}_A{now:%y%j_%H%M%S}_{now:%Y%j%H%M%S}.hdf' + return f"{prefix}_A{now:%y%j_%H%M%S}_{now:%Y%j%H%M%S}.hdf" def generate_imapp_filename(suffix): """Generate a filename that follows IMAPP MODIS L1b convention.""" now = datetime.now() - return f't1.{now:%y%j.%H%M}.{suffix}.hdf' + return f"t1.{now:%y%j.%H%M}.{suffix}.hdf" def create_hdfeos_test_file(filename: str, @@ -262,17 +262,17 @@ def create_hdfeos_test_file(filename: 
str, def _add_variable_to_file(h, var_name, var_info): - v = h.create(var_name, var_info['type'], var_info['data'].shape) - v[:] = var_info['data'] + v = h.create(var_name, var_info["type"], var_info["data"].shape) + v[:] = var_info["data"] dim_count = 0 - for dimension_name in var_info['attrs']['dim_labels']: + for dimension_name in var_info["attrs"]["dim_labels"]: v.dim(dim_count).setname(dimension_name) dim_count += 1 - v.setfillvalue(var_info['fill_value']) - v.scale_factor = var_info['attrs'].get('scale_factor', SCALE_FACTOR) - v.add_offset = var_info['attrs'].get('add_offset', ADD_OFFSET) - for attr_key, attr_val in var_info['attrs'].items(): - if attr_key == 'dim_labels': + v.setfillvalue(var_info["fill_value"]) + v.scale_factor = var_info["attrs"].get("scale_factor", SCALE_FACTOR) + v.add_offset = var_info["attrs"].get("add_offset", ADD_OFFSET) + for attr_key, attr_val in var_info["attrs"].items(): + if attr_key == "dim_labels": continue setattr(v, attr_key, attr_val) @@ -281,12 +281,12 @@ def _create_core_metadata(file_shortname: str) -> str: beginning_date = datetime.now() ending_date = beginning_date + timedelta(minutes=5) core_metadata_header = "GROUP = INVENTORYMETADATA\nGROUPTYPE = MASTERGROUP\n\n" \ - "GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = \"{}\"\n" \ + 'GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = "{}"\n' \ "END_OBJECT = RANGEBEGINNINGDATE\n\nOBJECT = RANGEBEGINNINGTIME\n" \ - "NUM_VAL = 1\nVALUE = \"{}\"\n" \ + 'NUM_VAL = 1\nVALUE = "{}"\n' \ "END_OBJECT = RANGEBEGINNINGTIME\n\nOBJECT = RANGEENDINGDATE\n" \ - "NUM_VAL = 1\nVALUE = \"{}\"\n" \ - "END_OBJECT = RANGEENDINGDATE\n\nOBJECT = RANGEENDINGTIME\nNUM_VAL = 1\nVALUE = \"{}\"\n" \ + 'NUM_VAL = 1\nVALUE = "{}"\n' \ + 'END_OBJECT = RANGEENDINGDATE\n\nOBJECT = RANGEENDINGTIME\nNUM_VAL = 1\nVALUE = "{}"\n' \ "END_OBJECT = RANGEENDINGTIME\nEND_GROUP = RANGEDATETIME" core_metadata_header = core_metadata_header.format( beginning_date.strftime("%Y-%m-%d"), @@ -295,13 +295,13 @@ def _create_core_metadata(file_shortname: str) -> str: ending_date.strftime("%H:%M:%S.%f") ) inst_metadata = "GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n" \ - "OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\nCLASS = \"1\"\n\n" \ - "OBJECT = ASSOCIATEDSENSORSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \ - "VALUE = \"MODIS\"\nEND_OBJECT = ASSOCIATEDSENSORSHORTNAME\n\n" \ - "OBJECT = ASSOCIATEDPLATFORMSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \ - "VALUE = \"Terra\"\nEND_OBJECT = ASSOCIATEDPLATFORMSHORTNAME\n\n" \ - "OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \ - "VALUE = \"MODIS\"\nEND_OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\n\n" \ + 'OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\nCLASS = "1"\n\n' \ + 'OBJECT = ASSOCIATEDSENSORSHORTNAME\nCLASS = "1"\nNUM_VAL = 1\n' \ + 'VALUE = "MODIS"\nEND_OBJECT = ASSOCIATEDSENSORSHORTNAME\n\n' \ + 'OBJECT = ASSOCIATEDPLATFORMSHORTNAME\nCLASS = "1"\nNUM_VAL = 1\n' \ + 'VALUE = "Terra"\nEND_OBJECT = ASSOCIATEDPLATFORMSHORTNAME\n\n' \ + 'OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\nCLASS = "1"\nNUM_VAL = 1\n' \ + 'VALUE = "MODIS"\nEND_OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\n\n' \ "END_OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\n\n" \ "END_GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n" collection_metadata = "GROUP = COLLECTIONDESCRIPTIONCLASS\n\nOBJECT = SHORTNAME\nNUM_VAL = 1\n" \ @@ -318,7 +318,7 @@ def _create_struct_metadata(geo_resolution: int) -> str: "GROUP=SWATH_1\n" \ "GROUP=DimensionMap\n" \ 
"OBJECT=DimensionMap_2\n" \ - f"GeoDimension=\"{geo_dim_factor}*nscans\"\n" \ + f'GeoDimension="{geo_dim_factor}*nscans"\n' \ "END_OBJECT=DimensionMap_2\n" \ "END_GROUP=DimensionMap\n" \ "END_GROUP=SWATH_1\n" \ @@ -370,7 +370,7 @@ def modis_l1b_nasa_mod02hkm_file(tmpdir_factory) -> list[str]: return [full_path] -@pytest.fixture +@pytest.fixture() def modis_l1b_nasa_mod02qkm_file(tmpdir_factory) -> list[str]: """Create a single MOD02QKM file following standard NASA file scheme.""" filename = generate_nasa_l1b_filename("MOD02Qkm") @@ -413,20 +413,20 @@ def modis_l1b_nasa_1km_mod03_files(modis_l1b_nasa_mod021km_file, modis_l1b_nasa_ def _get_basic_variable_info(var_name: str, resolution: int) -> dict: shape = _shape_for_resolution(resolution) data = np.ones((shape[0], shape[1]), dtype=np.uint16) - row_dim_name = f'Cell_Along_Swath_{resolution}m:modl2' - col_dim_name = f'Cell_Across_Swath_{resolution}m:modl2' + row_dim_name = f"Cell_Along_Swath_{resolution}m:modl2" + col_dim_name = f"Cell_Across_Swath_{resolution}m:modl2" return { var_name: { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 0, - 'attrs': { + "data": data, + "type": SDC.UINT16, + "fill_value": 0, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [row_dim_name, + "dim_labels": [row_dim_name, col_dim_name], - 'valid_range': (0, 32767), - 'scale_factor': 2.0, - 'add_offset': -1.0, + "valid_range": (0, 32767), + "scale_factor": 2.0, + "add_offset": -1.0, }, }, } @@ -437,35 +437,35 @@ def _get_cloud_mask_variable_info(var_name: str, resolution: int) -> dict: shape = _shape_for_resolution(resolution) data = np.zeros((num_bytes, shape[0], shape[1]), dtype=np.int8) byte_dim_name = "Byte_Segment:mod35" - row_dim_name = 'Cell_Along_Swath_1km:mod35' - col_dim_name = 'Cell_Across_Swath_1km:mod35' + row_dim_name = "Cell_Along_Swath_1km:mod35" + col_dim_name = "Cell_Across_Swath_1km:mod35" return { var_name: { - 'data': data, - 'type': SDC.INT8, - 'fill_value': 0, - 'attrs': { + "data": data, + "type": SDC.INT8, + "fill_value": 0, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [byte_dim_name, + "dim_labels": [byte_dim_name, row_dim_name, col_dim_name], - 'valid_range': (0, -1), - 'scale_factor': 1., - 'add_offset': 0., + "valid_range": (0, -1), + "scale_factor": 1., + "add_offset": 0., }, }, - 'Quality_Assurance': { - 'data': np.ones((shape[0], shape[1], 10), dtype=np.int8), - 'type': SDC.INT8, - 'fill_value': 0, - 'attrs': { + "Quality_Assurance": { + "data": np.ones((shape[0], shape[1], 10), dtype=np.int8), + "type": SDC.INT8, + "fill_value": 0, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [row_dim_name, + "dim_labels": [row_dim_name, col_dim_name, - 'Quality_Dimension:mod35'], - 'valid_range': (0, -1), - 'scale_factor': 2., - 'add_offset': -0.5, + "Quality_Dimension:mod35"], + "valid_range": (0, -1), + "scale_factor": 2., + "add_offset": -0.5, }, }, } @@ -474,47 +474,47 @@ def _get_cloud_mask_variable_info(var_name: str, resolution: int) -> dict: def _get_mask_byte1_variable_info() -> dict: shape = _shape_for_resolution(1000) data = np.zeros((shape[0], shape[1]), dtype=np.uint16) - row_dim_name = 'Cell_Along_Swath_1km:mod35' - col_dim_name = 'Cell_Across_Swath_1km:mod35' + row_dim_name = "Cell_Along_Swath_1km:mod35" + col_dim_name = "Cell_Across_Swath_1km:mod35" return { "MODIS_Cloud_Mask": { - 'data': data, - 'type': 
SDC.UINT16, - 'fill_value': 9999, - 'attrs': { + "data": data, + "type": SDC.UINT16, + "fill_value": 9999, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [row_dim_name, + "dim_labels": [row_dim_name, col_dim_name], - 'valid_range': (0, 4), - 'scale_factor': 2, - 'add_offset': -1, + "valid_range": (0, 4), + "scale_factor": 2, + "add_offset": -1, }, }, "MODIS_Simple_LandSea_Mask": { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 9999, - 'attrs': { + "data": data, + "type": SDC.UINT16, + "fill_value": 9999, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [row_dim_name, + "dim_labels": [row_dim_name, col_dim_name], - 'valid_range': (0, 4), - 'scale_factor': 2, - 'add_offset': -1, + "valid_range": (0, 4), + "scale_factor": 2, + "add_offset": -1, }, }, "MODIS_Snow_Ice_Flag": { - 'data': data, - 'type': SDC.UINT16, - 'fill_value': 9999, - 'attrs': { + "data": data, + "type": SDC.UINT16, + "fill_value": 9999, + "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - 'dim_labels': [row_dim_name, + "dim_labels": [row_dim_name, col_dim_name], - 'valid_range': (0, 2), - 'scale_factor': 2, - 'add_offset': -1, + "valid_range": (0, 2), + "scale_factor": 2, + "add_offset": -1, }, }, } @@ -523,7 +523,7 @@ def _get_mask_byte1_variable_info() -> dict: def generate_nasa_l2_filename(prefix: str) -> str: """Generate a file name that follows MODIS 35 L2 convention in a temporary directory.""" now = datetime.now() - return f'{prefix}_L2.A{now:%Y%j.%H%M}.061.{now:%Y%j%H%M%S}.hdf' + return f"{prefix}_L2.A{now:%Y%j.%H%M}.061.{now:%Y%j%H%M%S}.hdf" @pytest.fixture(scope="session") diff --git a/satpy/tests/reader_tests/modis_tests/conftest.py b/satpy/tests/reader_tests/modis_tests/conftest.py index 09f98049db..e6a8432653 100644 --- a/satpy/tests/reader_tests/modis_tests/conftest.py +++ b/satpy/tests/reader_tests/modis_tests/conftest.py @@ -17,7 +17,7 @@ # satpy. If not, see <http://www.gnu.org/licenses/>. 
"""Setup and configuration for all reader tests.""" -from ._modis_fixtures import ( +from ._modis_fixtures import ( # noqa: F401, I001 modis_l1b_imapp_1000m_file, modis_l1b_imapp_geo_file, modis_l1b_nasa_1km_mod03_files, diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 53f0ca46ce..11068b6577 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -50,7 +50,7 @@ def _check_shared_metadata(data_arr): assert data_arr.attrs["platform_name"] == "EOS-Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) - assert data_arr.attrs['reader'] == 'modis_l1b' + assert data_arr.attrs["reader"] == "modis_l1b" assert "resolution" in data_arr.attrs res = data_arr.attrs["resolution"] if res == 5000: @@ -91,30 +91,30 @@ class TestModisL1b: def test_available_reader(self): """Test that MODIS L1b reader is available.""" - assert 'modis_l1b' in available_readers() + assert "modis_l1b" in available_readers() @pytest.mark.parametrize( - ('input_files', 'expected_names', 'expected_data_res', 'expected_geo_res'), + ("input_files", "expected_names", "expected_data_res", "expected_geo_res"), [ - [lazy_fixture('modis_l1b_nasa_mod021km_file'), + (lazy_fixture("modis_l1b_nasa_mod021km_file"), AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, - [1000], [5000, 1000]], - [lazy_fixture('modis_l1b_imapp_1000m_file'), + [1000], [5000, 1000]), + (lazy_fixture("modis_l1b_imapp_1000m_file"), AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, - [1000], [5000, 1000]], - [lazy_fixture('modis_l1b_nasa_mod02hkm_file'), - AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [500], [1000, 500, 250]], - [lazy_fixture('modis_l1b_nasa_mod02qkm_file'), - AVAILABLE_QKM_PRODUCT_NAMES, [250], [1000, 500, 250]], + [1000], [5000, 1000]), + (lazy_fixture("modis_l1b_nasa_mod02hkm_file"), + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [500], [1000, 500, 250]), + (lazy_fixture("modis_l1b_nasa_mod02qkm_file"), + AVAILABLE_QKM_PRODUCT_NAMES, [250], [1000, 500, 250]), ] ) def test_scene_available_datasets(self, input_files, expected_names, expected_data_res, expected_geo_res): """Test that datasets are available.""" - scene = Scene(reader='modis_l1b', filenames=input_files) + scene = Scene(reader="modis_l1b", filenames=input_files) available_datasets = scene.available_dataset_names() assert len(available_datasets) > 0 - assert 'longitude' in available_datasets - assert 'latitude' in available_datasets + assert "longitude" in available_datasets + assert "latitude" in available_datasets for chan_name in expected_names: assert chan_name in available_datasets @@ -123,8 +123,8 @@ def test_scene_available_datasets(self, input_files, expected_names, expected_da available_geos = {x: [] for x in expected_geo_res} # Make sure that every resolution from the reader is what we expect for data_id in available_data_ids: - res = data_id['resolution'] - if data_id['name'] in ['longitude', 'latitude']: + res = data_id["resolution"] + if data_id["name"] in ["longitude", "latitude"]: assert res in expected_geo_res available_geos[res].append(data_id) else: @@ -138,29 +138,29 @@ def test_scene_available_datasets(self, input_files, expected_names, expected_da assert avail_id, f"Missing geo datasets for geo resolution {exp_res}" @pytest.mark.parametrize( - ('input_files', 
'has_5km', 'has_500', 'has_250', 'default_res'), + ("input_files", "has_5km", "has_500", "has_250", "default_res"), [ - [lazy_fixture('modis_l1b_nasa_mod021km_file'), - True, False, False, 1000], - [lazy_fixture('modis_l1b_imapp_1000m_file'), - True, False, False, 1000], - [lazy_fixture('modis_l1b_nasa_mod02hkm_file'), - False, True, True, 250], - [lazy_fixture('modis_l1b_nasa_mod02qkm_file'), - False, True, True, 250], - [lazy_fixture('modis_l1b_nasa_1km_mod03_files'), - True, True, True, 250], + (lazy_fixture("modis_l1b_nasa_mod021km_file"), + True, False, False, 1000), + (lazy_fixture("modis_l1b_imapp_1000m_file"), + True, False, False, 1000), + (lazy_fixture("modis_l1b_nasa_mod02hkm_file"), + False, True, True, 250), + (lazy_fixture("modis_l1b_nasa_mod02qkm_file"), + False, True, True, 250), + (lazy_fixture("modis_l1b_nasa_1km_mod03_files"), + True, True, True, 250), ] ) def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res): """Test that longitude and latitude datasets are loaded correctly.""" - scene = Scene(reader='modis_l1b', filenames=input_files) + scene = Scene(reader="modis_l1b", filenames=input_files) shape_5km = _shape_for_resolution(5000) shape_500m = _shape_for_resolution(500) shape_250m = _shape_for_resolution(250) default_shape = _shape_for_resolution(default_res) scheduler = CustomScheduler(max_computes=1 + has_5km + has_500 + has_250) - with dask.config.set({'scheduler': scheduler, 'array.chunk-size': '1 MiB'}): + with dask.config.set({"scheduler": scheduler, "array.chunk-size": "1 MiB"}): _load_and_check_geolocation(scene, "*", default_res, default_shape, True) _load_and_check_geolocation(scene, 5000, 5000, shape_5km, has_5km) _load_and_check_geolocation(scene, 500, 500, shape_500m, has_500) @@ -168,38 +168,38 @@ def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, d def test_load_sat_zenith_angle(self, modis_l1b_nasa_mod021km_file): """Test loading satellite zenith angle band.""" - scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file) - dataset_name = 'satellite_zenith_angle' - with dask.config.set({'array.chunk-size': '1 MiB'}): + scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file) + dataset_name = "satellite_zenith_angle" + with dask.config.set({"array.chunk-size": "1 MiB"}): scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) - assert dataset.attrs['resolution'] == 1000 + assert dataset.attrs["resolution"] == 1000 _check_shared_metadata(dataset) def test_load_vis(self, modis_l1b_nasa_mod021km_file): """Test loading visible band.""" - scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file) - dataset_name = '1' - with dask.config.set({'array.chunk-size': '1 MiB'}): + scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file) + dataset_name = "1" + with dask.config.set({"array.chunk-size": "1 MiB"}): scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset[0, 0] == 300.0 assert dataset.shape == _shape_for_resolution(1000) - assert dataset.attrs['resolution'] == 1000 + assert dataset.attrs["resolution"] == 1000 _check_shared_metadata(dataset) @pytest.mark.parametrize("mask_saturated", [False, True]) def test_load_vis_saturation(self, mask_saturated, modis_l1b_nasa_mod021km_file): """Test loading visible band.""" - scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file, + scene = Scene(reader="modis_l1b", filenames=modis_l1b_nasa_mod021km_file, 
reader_kwargs={"mask_saturated": mask_saturated}) - dataset_name = '2' - with dask.config.set({'array.chunk-size': '1 MiB'}): + dataset_name = "2" + with dask.config.set({"array.chunk-size": "1 MiB"}): scene.load([dataset_name]) dataset = scene[dataset_name] assert dataset.shape == _shape_for_resolution(1000) - assert dataset.attrs['resolution'] == 1000 + assert dataset.attrs["resolution"] == 1000 _check_shared_metadata(dataset) # check saturation fill values diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py index 222f365d87..8876decb59 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py @@ -46,11 +46,11 @@ def _check_shared_metadata(data_arr, expect_area=False): assert data_arr.attrs["platform_name"] == "EOS-Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) - assert data_arr.attrs['reader'] == 'modis_l2' + assert data_arr.attrs["reader"] == "modis_l2" if expect_area: - assert data_arr.attrs.get('area') is not None + assert data_arr.attrs.get("area") is not None else: - assert 'area' not in data_arr.attrs + assert "area" not in data_arr.attrs class TestModisL2: @@ -58,28 +58,28 @@ class TestModisL2: def test_available_reader(self): """Test that MODIS L2 reader is available.""" - assert 'modis_l2' in available_readers() + assert "modis_l2" in available_readers() def test_scene_available_datasets(self, modis_l2_nasa_mod35_file): """Test that datasets are available.""" - scene = Scene(reader='modis_l2', filenames=modis_l2_nasa_mod35_file) + scene = Scene(reader="modis_l2", filenames=modis_l2_nasa_mod35_file) available_datasets = scene.all_dataset_names() assert len(available_datasets) > 0 - assert 'cloud_mask' in available_datasets - assert 'latitude' in available_datasets - assert 'longitude' in available_datasets + assert "cloud_mask" in available_datasets + assert "latitude" in available_datasets + assert "longitude" in available_datasets @pytest.mark.parametrize( - ('input_files', 'has_5km', 'has_500', 'has_250', 'default_res'), + ("input_files", "has_5km", "has_500", "has_250", "default_res"), [ - [lazy_fixture('modis_l2_nasa_mod35_file'), - True, False, False, 1000], + (lazy_fixture("modis_l2_nasa_mod35_file"), + True, False, False, 1000), ] ) def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res): """Test that longitude and latitude datasets are loaded correctly.""" from .test_modis_l1b import _load_and_check_geolocation - scene = Scene(reader='modis_l2', filenames=input_files) + scene = Scene(reader="modis_l2", filenames=input_files) shape_5km = _shape_for_resolution(5000) shape_500m = _shape_for_resolution(500) shape_250m = _shape_for_resolution(250) @@ -96,8 +96,8 @@ def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, d def test_load_quality_assurance(self, modis_l2_nasa_mod35_file): """Test loading quality assurance.""" - scene = Scene(reader='modis_l2', filenames=modis_l2_nasa_mod35_file) - dataset_name = 'quality_assurance' + scene = Scene(reader="modis_l2", filenames=modis_l2_nasa_mod35_file) + dataset_name = "quality_assurance" scene.load([dataset_name]) quality_assurance_id = make_dataid(name=dataset_name, resolution=1000) assert quality_assurance_id in scene @@ -106,19 +106,19 @@ def test_load_quality_assurance(self, modis_l2_nasa_mod35_file): _check_shared_metadata(quality_assurance, expect_area=True) 
@pytest.mark.parametrize( - ('input_files', 'loadables', 'request_resolution', 'exp_resolution', 'exp_area'), + ("input_files", "loadables", "request_resolution", "exp_resolution", "exp_area"), [ - [lazy_fixture('modis_l2_nasa_mod35_mod03_files'), + (lazy_fixture("modis_l2_nasa_mod35_mod03_files"), ["cloud_mask"], - 1000, 1000, True], - [lazy_fixture('modis_l2_imapp_mask_byte1_geo_files'), + 1000, 1000, True), + (lazy_fixture("modis_l2_imapp_mask_byte1_geo_files"), ["cloud_mask", "land_sea_mask", "snow_ice_mask"], - None, 1000, True], + None, 1000, True), ] ) def test_load_category_dataset(self, input_files, loadables, request_resolution, exp_resolution, exp_area): """Test loading category products.""" - scene = Scene(reader='modis_l2', filenames=input_files) + scene = Scene(reader="modis_l2", filenames=input_files) kwargs = {"resolution": request_resolution} if request_resolution is not None else {} scene.load(loadables, **kwargs) for ds_name in loadables: @@ -129,23 +129,23 @@ def test_load_category_dataset(self, input_files, loadables, request_resolution, cat_data_arr = cat_data_arr.compute() assert cat_data_arr.shape == _shape_for_resolution(exp_resolution) assert cat_data_arr.values[0, 0] == 0.0 - assert cat_data_arr.attrs.get('resolution') == exp_resolution + assert cat_data_arr.attrs.get("resolution") == exp_resolution # mask variables should be integers assert np.issubdtype(cat_data_arr.dtype, np.integer) - assert cat_data_arr.attrs.get('_FillValue') is not None + assert cat_data_arr.attrs.get("_FillValue") is not None _check_shared_metadata(cat_data_arr, expect_area=exp_area) @pytest.mark.parametrize( - ('input_files', 'exp_area'), + ("input_files", "exp_area"), [ - [lazy_fixture('modis_l2_nasa_mod35_file'), False], - [lazy_fixture('modis_l2_nasa_mod35_mod03_files'), True], + (lazy_fixture("modis_l2_nasa_mod35_file"), False), + (lazy_fixture("modis_l2_nasa_mod35_mod03_files"), True), ] ) def test_load_250m_cloud_mask_dataset(self, input_files, exp_area): """Test loading 250m cloud mask.""" - scene = Scene(reader='modis_l2', filenames=input_files) - dataset_name = 'cloud_mask' + scene = Scene(reader="modis_l2", filenames=input_files) + dataset_name = "cloud_mask" scene.load([dataset_name], resolution=250) cloud_mask_id = make_dataid(name=dataset_name, resolution=250) assert cloud_mask_id in scene @@ -156,21 +156,21 @@ def test_load_250m_cloud_mask_dataset(self, input_files, exp_area): assert cloud_mask.values[0, 0] == 0.0 # mask variables should be integers assert np.issubdtype(cloud_mask.dtype, np.integer) - assert cloud_mask.attrs.get('_FillValue') is not None + assert cloud_mask.attrs.get("_FillValue") is not None _check_shared_metadata(cloud_mask, expect_area=exp_area) @pytest.mark.parametrize( - ('input_files', 'loadables', 'exp_resolution', 'exp_area', 'exp_value'), + ("input_files", "loadables", "exp_resolution", "exp_area", "exp_value"), [ - [lazy_fixture('modis_l2_nasa_mod06_file'), ["surface_pressure"], 5000, True, 4.0], + (lazy_fixture("modis_l2_nasa_mod06_file"), ["surface_pressure"], 5000, True, 4.0), # snow mask is considered a category product, factor/offset ignored - [lazy_fixture('modis_l2_imapp_snowmask_file'), ["snow_mask"], 1000, False, 1.0], - [lazy_fixture('modis_l2_imapp_snowmask_geo_files'), ["snow_mask"], 1000, True, 1.0], + (lazy_fixture("modis_l2_imapp_snowmask_file"), ["snow_mask"], 1000, False, 1.0), + (lazy_fixture("modis_l2_imapp_snowmask_geo_files"), ["snow_mask"], 1000, True, 1.0), ] ) def test_load_l2_dataset(self, input_files, loadables, 
exp_resolution, exp_area, exp_value): """Load and check an L2 variable.""" - scene = Scene(reader='modis_l2', filenames=input_files) + scene = Scene(reader="modis_l2", filenames=input_files) scene.load(loadables) for ds_name in loadables: assert ds_name in scene @@ -179,5 +179,5 @@ def test_load_l2_dataset(self, input_files, loadables, exp_resolution, exp_area, data_arr = data_arr.compute() assert data_arr.values[0, 0] == exp_value assert data_arr.shape == _shape_for_resolution(exp_resolution) - assert data_arr.attrs.get('resolution') == exp_resolution + assert data_arr.attrs.get("resolution") == exp_resolution _check_shared_metadata(data_arr, expect_area=exp_area) diff --git a/satpy/tests/reader_tests/test_aapp_l1b.py b/satpy/tests/reader_tests/test_aapp_l1b.py index c0f84c5a63..a9997f7a7e 100644 --- a/satpy/tests/reader_tests/test_aapp_l1b.py +++ b/satpy/tests/reader_tests/test_aapp_l1b.py @@ -36,26 +36,26 @@ class TestAAPPL1BAllChannelsPresent(unittest.TestCase): def setUp(self): """Set up the test case.""" self._header = np.zeros(1, dtype=_HEADERTYPE) - self._header['satid'][0] = 13 - self._header['radtempcnv'][0] = [[267194, -171669, 1002811], + self._header["satid"][0] = 13 + self._header["radtempcnv"][0] = [[267194, -171669, 1002811], [930310, -59084, 1001600], [828600, -37854, 1001147]] # first 3b is off, 3a is on - self._header['inststat1'][0] = 0b1111011100000000 + self._header["inststat1"][0] = 0b1111011100000000 # switch 3a off at position 1 - self._header['statchrecnb'][0] = 1 + self._header["statchrecnb"][0] = 1 # 3b is on, 3a is off - self._header['inststat2'][0] = 0b1111101100000000 + self._header["inststat2"][0] = 0b1111101100000000 self._data = np.zeros(3, dtype=_SCANTYPE) - self._data['scnlinyr'][:] = 2020 - self._data['scnlindy'][:] = 8 - self._data['scnlintime'][0] = 30195225 - self._data['scnlintime'][1] = 30195389 - self._data['scnlintime'][2] = 30195556 - self._data['scnlinbit'][0] = -16383 - self._data['scnlinbit'][1] = -16383 - self._data['scnlinbit'][2] = -16384 + self._data["scnlinyr"][:] = 2020 + self._data["scnlindy"][:] = 8 + self._data["scnlintime"][0] = 30195225 + self._data["scnlintime"][1] = 30195389 + self._data["scnlintime"][2] = 30195556 + self._data["scnlinbit"][0] = -16383 + self._data["scnlinbit"][1] = -16383 + self._data["scnlinbit"][2] = -16384 calvis = np.array([[[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [543489984, -21941870, 1592440064, -545027008, 499]], @@ -65,8 +65,8 @@ def setUp(self): [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [257550000, -10449420, 1812019968, -785690304, 499]]]) - self._data['calvis'][:] = calvis - self._data['calir'] = [[[[0, -2675, 2655265], + self._data["calvis"][:] = calvis + self._data["calir"] = [[[[0, -2675, 2655265], [0, 0, 0]], [[33605, -260786, 226818992], [0, 0, 0]], @@ -84,13 +84,13 @@ def setUp(self): [0, 0, 0]], [[13871, -249531, 234652640], [0, 0, 0]]]] - self._data['hrpt'] = np.ones_like(self._data['hrpt']) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] + self._data["hrpt"] = np.ones_like(self._data["hrpt"]) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] - self.filename_info = {'platform_shortname': 'metop03', 'start_time': datetime.datetime(2020, 1, 8, 8, 19), - 'orbit_number': 6071} - self.filetype_info = {'file_reader': AVHRRAAPPL1BFile, + self.filename_info = {"platform_shortname": "metop03", "start_time": datetime.datetime(2020, 1, 8, 8, 19), + "orbit_number": 6071} + self.filetype_info = {"file_reader": AVHRRAAPPL1BFile, 'file_patterns': 
['hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b'], # noqa - 'file_type': 'avhrr_aapp_l1b'} + "file_type": "avhrr_aapp_l1b"} def test_read(self): """Test the reading.""" @@ -103,22 +103,22 @@ def test_read(self): info = {} mins = [] maxs = [] - for name in ['1', '2', '3a']: - key = make_dataid(name=name, calibration='reflectance') + for name in ["1", "2", "3a"]: + key = make_dataid(name=name, calibration="reflectance") res = fh.get_dataset(key, info) assert res.min() == 0 assert res.max() >= 100 mins.append(res.min().values) maxs.append(res.max().values) - if name == '3a': + if name == "3a": assert np.all(np.isnan(res[:2, :])) - for name in ['3b', '4', '5']: - key = make_dataid(name=name, calibration='reflectance') + for name in ["3b", "4", "5"]: + key = make_dataid(name=name, calibration="reflectance") res = fh.get_dataset(key, info) mins.append(res.min().values) maxs.append(res.max().values) - if name == '3b': + if name == "3b": assert np.all(np.isnan(res[2:, :])) np.testing.assert_allclose(mins, [0., 0., 0., 204.10106939, 103.23477235, 106.42609758]) @@ -134,7 +134,7 @@ def test_angles(self): fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} - key = make_dataid(name='solar_zenith_angle') + key = make_dataid(name="solar_zenith_angle") res = fh.get_dataset(key, info) assert np.all(res == 0) @@ -147,10 +147,10 @@ def test_navigation(self): fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} - key = make_dataid(name='longitude') + key = make_dataid(name="longitude") res = fh.get_dataset(key, info) assert np.all(res == 0) - key = make_dataid(name='latitude') + key = make_dataid(name="latitude") res = fh.get_dataset(key, info) assert np.all(res == 0) @@ -218,9 +218,9 @@ def test_interpolation(self): fh._get_coordinates_in_degrees.return_value = (lons40km, lats40km) (lons, lats) = fh._get_all_interpolated_coordinates() lon_data = lons.compute() - self.assertTrue(np.max(lon_data) <= 180) + assert (np.max(lon_data) <= 180) # Not longitdes between -110, 110 in indata - self.assertTrue(np.all(np.abs(lon_data) > 110)) + assert np.all(np.abs(lon_data) > 110) def test_interpolation_angles(self): """Test reading the lon and lats.""" @@ -276,8 +276,8 @@ def test_interpolation_angles(self): fh._get_tiepoint_angles_in_degrees = mock.MagicMock() fh._get_tiepoint_angles_in_degrees.return_value = (sunz40km, satz40km, azidiff40km) (sunz, satz, azidiff) = fh._get_all_interpolated_angles() - self.assertTrue(np.max(sunz) <= 123) - self.assertTrue(np.max(satz) <= 70) + assert (np.max(sunz) <= 123) + assert (np.max(satz) <= 70) class TestAAPPL1BChannel3AMissing(unittest.TestCase): @@ -286,25 +286,25 @@ class TestAAPPL1BChannel3AMissing(unittest.TestCase): def setUp(self): """Set up the test case.""" self._header = np.zeros(1, dtype=_HEADERTYPE) - self._header['satid'][0] = 13 - self._header['radtempcnv'][0] = [[267194, -171669, 1002811], + self._header["satid"][0] = 13 + self._header["radtempcnv"][0] = [[267194, -171669, 1002811], [930310, -59084, 1001600], [828600, -37854, 1001147]] # first 3a is off, 3b is on - self._header['inststat1'][0] = 0b1111011100000000 + self._header["inststat1"][0] = 0b1111011100000000 # valid for the whole pass - self._header['statchrecnb'][0] = 0 - self._header['inststat2'][0] = 0b0 + self._header["statchrecnb"][0] = 0 + self._header["inststat2"][0] = 0b0 self._data = np.zeros(3, dtype=_SCANTYPE) - self._data['scnlinyr'][:] = 2020 - self._data['scnlindy'][:] = 8 - self._data['scnlintime'][0] = 
30195225 - self._data['scnlintime'][1] = 30195389 - self._data['scnlintime'][2] = 30195556 - self._data['scnlinbit'][0] = -16383 - self._data['scnlinbit'][1] = -16383 - self._data['scnlinbit'][2] = -16383 + self._data["scnlinyr"][:] = 2020 + self._data["scnlindy"][:] = 8 + self._data["scnlintime"][0] = 30195225 + self._data["scnlintime"][1] = 30195389 + self._data["scnlintime"][2] = 30195556 + self._data["scnlinbit"][0] = -16383 + self._data["scnlinbit"][1] = -16383 + self._data["scnlinbit"][2] = -16383 calvis = np.array([[[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [543489984, -21941870, 1592440064, -545027008, 499]], @@ -314,8 +314,8 @@ def setUp(self): [[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [257550000, -10449420, 1812019968, -785690304, 499]]]) - self._data['calvis'][:] = calvis - self._data['calir'] = [[[[0, -2675, 2655265], + self._data["calvis"][:] = calvis + self._data["calir"] = [[[[0, -2675, 2655265], [0, 0, 0]], [[33605, -260786, 226818992], [0, 0, 0]], @@ -333,15 +333,15 @@ def setUp(self): [0, 0, 0]], [[13871, -249531, 234652640], [0, 0, 0]]]] - self._data['hrpt'] = np.ones_like(self._data['hrpt']) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] + self._data["hrpt"] = np.ones_like(self._data["hrpt"]) * (np.arange(2048) // 2)[np.newaxis, :, np.newaxis] - self.filename_info = {'platform_shortname': 'metop03', 'start_time': datetime.datetime(2020, 1, 8, 8, 19), - 'orbit_number': 6071} - self.filetype_info = {'file_reader': AVHRRAAPPL1BFile, - 'file_patterns': [ - 'hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b'], + self.filename_info = {"platform_shortname": "metop03", "start_time": datetime.datetime(2020, 1, 8, 8, 19), + "orbit_number": 6071} + self.filetype_info = {"file_reader": AVHRRAAPPL1BFile, + "file_patterns": [ + "hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1b"], # noqa - 'file_type': 'avhrr_aapp_l1b'} + "file_type": "avhrr_aapp_l1b"} def test_loading_missing_channels_returns_none(self): """Test that loading a missing channel raises a keyerror.""" @@ -352,7 +352,7 @@ def test_loading_missing_channels_returns_none(self): fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) info = {} - key = make_dataid(name='3a', calibration='reflectance') + key = make_dataid(name="3a", calibration="reflectance") assert fh.get_dataset(key, info) is None def test_available_datasets_miss_3a(self): @@ -363,16 +363,16 @@ def test_available_datasets_miss_3a(self): self._data.tofile(tmpfile) fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) - configured_datasets = [[None, {'name': '1'}], - [None, {'name': '2'}], - [None, {'name': '3a'}], - [None, {'name': '3b'}], - [None, {'name': '4'}], - [None, {'name': '5'}], + configured_datasets = [[None, {"name": "1"}], + [None, {"name": "2"}], + [None, {"name": "3a"}], + [None, {"name": "3b"}], + [None, {"name": "4"}], + [None, {"name": "5"}], ] available_datasets = fh.available_datasets(configured_datasets) for status, mda in available_datasets: - if mda['name'] == '3a': + if mda["name"] == "3a": assert status is False else: assert status is True @@ -397,9 +397,9 @@ def setUp(self): [[18214, -200932, 182150896], [0, 0, 0]], [[6761, -200105, 192092496], [0, 0, 0]]], dtype=" 0 def test_is_valid_time(self): """Test that valid times are correctly identified.""" - assert AHIHSDFileHandler._is_valid_timeline(FAKE_BASIC_INFO['observation_timeline']) - assert not AHIHSDFileHandler._is_valid_timeline('65526') + assert 
AHIHSDFileHandler._is_valid_timeline(FAKE_BASIC_INFO["observation_timeline"]) + assert not AHIHSDFileHandler._is_valid_timeline("65526") def test_time_rounding(self): """Test rounding of the nominal time.""" mocker = mock.MagicMock() in_date = datetime(2020, 1, 1, 12, 0, 0) - with mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._is_valid_timeline', mocker): + with mock.patch("satpy.readers.ahi_hsd.AHIHSDFileHandler._is_valid_timeline", mocker): with _fake_hsd_handler() as fh: mocker.return_value = True assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 3, 0, 0) @@ -478,31 +478,31 @@ def test_time_rounding(self): class TestAHICalibration(unittest.TestCase): """Test case for various AHI calibration types.""" - @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler.__init__', + @mock.patch("satpy.readers.ahi_hsd.AHIHSDFileHandler.__init__", return_value=None) def setUp(self, *mocks): """Create fake data for testing.""" self.def_cali = [-0.0037, 15.20] self.upd_cali = [-0.0074, 30.40] self.bad_cali = [0.0, 0.0] - fh = AHIHSDFileHandler(filetype_info={'file_type': 'hsd_b01'}) - fh.calib_mode = 'NOMINAL' + fh = AHIHSDFileHandler(filetype_info={"file_type": "hsd_b01"}) + fh.calib_mode = "NOMINAL" fh.user_calibration = None fh.is_zipped = False fh._header = { - 'block5': {'band_number': [5], - 'gain_count2rad_conversion': [self.def_cali[0]], - 'offset_count2rad_conversion': [self.def_cali[1]], - 'central_wave_length': [10.4073], }, - 'calibration': {'coeff_rad2albedo_conversion': [0.0019255], - 'speed_of_light': [299792458.0], - 'planck_constant': [6.62606957e-34], - 'boltzmann_constant': [1.3806488e-23], - 'c0_rad2tb_conversion': [-0.116127314574], - 'c1_rad2tb_conversion': [1.00099153832], - 'c2_rad2tb_conversion': [-1.76961091571e-06], - 'cali_gain_count2rad_conversion': [self.upd_cali[0]], - 'cali_offset_count2rad_conversion': [self.upd_cali[1]]}, + "block5": {"band_number": [5], + "gain_count2rad_conversion": [self.def_cali[0]], + "offset_count2rad_conversion": [self.def_cali[1]], + "central_wave_length": [10.4073], }, + "calibration": {"coeff_rad2albedo_conversion": [0.0019255], + "speed_of_light": [299792458.0], + "planck_constant": [6.62606957e-34], + "boltzmann_constant": [1.3806488e-23], + "c0_rad2tb_conversion": [-0.116127314574], + "c1_rad2tb_conversion": [1.00099153832], + "c2_rad2tb_conversion": [-1.76961091571e-06], + "cali_gain_count2rad_conversion": [self.upd_cali[0]], + "cali_offset_count2rad_conversion": [self.upd_cali[1]]}, } self.counts = da.array(np.array([[0, 1000], @@ -513,97 +513,95 @@ def test_default_calibrate(self, *mocks): """Test default in-file calibration modes.""" self.setUp() # Counts - self.assertEqual(self.fh.calibrate(data=123, - calibration='counts'), - 123) + assert self.fh.calibrate(data=123, calibration="counts") == 123 # Radiance rad_exp = np.array([[15.2, 11.5], [7.8, -3.3]]) rad = self.fh.calibrate(data=self.counts, - calibration='radiance') - self.assertTrue(np.allclose(rad, rad_exp)) + calibration="radiance") + assert np.allclose(rad, rad_exp) # Brightness Temperature bt_exp = np.array([[330.978979, 310.524688], [285.845017, np.nan]]) bt = self.fh.calibrate(data=self.counts, - calibration='brightness_temperature') + calibration="brightness_temperature") np.testing.assert_allclose(bt, bt_exp) # Reflectance refl_exp = np.array([[2.92676, 2.214325], [1.50189, 0.]]) refl = self.fh.calibrate(data=self.counts, - calibration='reflectance') - self.assertTrue(np.allclose(refl, refl_exp)) + calibration="reflectance") + assert 
np.allclose(refl, refl_exp) def test_updated_calibrate(self): """Test updated in-file calibration modes.""" # Standard operation - self.fh.calib_mode = 'UPDATE' + self.fh.calib_mode = "UPDATE" rad_exp = np.array([[30.4, 23.0], [15.6, -6.6]]) - rad = self.fh.calibrate(data=self.counts, calibration='radiance') - self.assertTrue(np.allclose(rad, rad_exp)) + rad = self.fh.calibrate(data=self.counts, calibration="radiance") + assert np.allclose(rad, rad_exp) # Case for no updated calibration available (older data) self.fh._header = { - 'block5': {'band_number': [5], - 'gain_count2rad_conversion': [self.def_cali[0]], - 'offset_count2rad_conversion': [self.def_cali[1]], - 'central_wave_length': [10.4073], }, - 'calibration': {'coeff_rad2albedo_conversion': [0.0019255], - 'speed_of_light': [299792458.0], - 'planck_constant': [6.62606957e-34], - 'boltzmann_constant': [1.3806488e-23], - 'c0_rad2tb_conversion': [-0.116127314574], - 'c1_rad2tb_conversion': [1.00099153832], - 'c2_rad2tb_conversion': [-1.76961091571e-06], - 'cali_gain_count2rad_conversion': [self.bad_cali[0]], - 'cali_offset_count2rad_conversion': [self.bad_cali[1]]}, + "block5": {"band_number": [5], + "gain_count2rad_conversion": [self.def_cali[0]], + "offset_count2rad_conversion": [self.def_cali[1]], + "central_wave_length": [10.4073], }, + "calibration": {"coeff_rad2albedo_conversion": [0.0019255], + "speed_of_light": [299792458.0], + "planck_constant": [6.62606957e-34], + "boltzmann_constant": [1.3806488e-23], + "c0_rad2tb_conversion": [-0.116127314574], + "c1_rad2tb_conversion": [1.00099153832], + "c2_rad2tb_conversion": [-1.76961091571e-06], + "cali_gain_count2rad_conversion": [self.bad_cali[0]], + "cali_offset_count2rad_conversion": [self.bad_cali[1]]}, } - rad = self.fh.calibrate(data=self.counts, calibration='radiance') + rad = self.fh.calibrate(data=self.counts, calibration="radiance") rad_exp = np.array([[15.2, 11.5], [7.8, -3.3]]) - self.assertTrue(np.allclose(rad, rad_exp)) + assert np.allclose(rad, rad_exp) def test_user_calibration(self): """Test user-defined calibration modes.""" # This is for radiance correction - self.fh.user_calibration = {'B13': {'slope': 0.95, - 'offset': -0.1}} - self.fh.band_name = 'B13' - rad = self.fh.calibrate(data=self.counts, calibration='radiance') + self.fh.user_calibration = {"B13": {"slope": 0.95, + "offset": -0.1}} + self.fh.band_name = "B13" + rad = self.fh.calibrate(data=self.counts, calibration="radiance") rad_np = rad.compute() assert rad.dtype == rad_np.dtype assert rad.dtype == np.float32 rad_exp = np.array([[16.10526316, 12.21052632], [8.31578947, -3.36842105]]) - self.assertTrue(np.allclose(rad, rad_exp)) + assert np.allclose(rad, rad_exp) # This is for DN calibration - self.fh.user_calibration = {'B13': {'slope': -0.0032, - 'offset': 15.20}, - 'type': 'DN'} - self.fh.band_name = 'B13' - rad = self.fh.calibrate(data=self.counts, calibration='radiance') + self.fh.user_calibration = {"B13": {"slope": -0.0032, + "offset": 15.20}, + "type": "DN"} + self.fh.band_name = "B13" + rad = self.fh.calibrate(data=self.counts, calibration="radiance") rad_np = rad.compute() assert rad.dtype == rad_np.dtype assert rad.dtype == np.float32 rad_exp = np.array([[15.2, 12.], [8.8, -0.8]]) - self.assertTrue(np.allclose(rad, rad_exp)) + assert np.allclose(rad, rad_exp) @contextlib.contextmanager def _fake_hsd_handler(fh_kwargs=None): """Create a test file handler.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_hsd.np.fromfile', _custom_fromfile), \ - 
mock.patch('satpy.readers.ahi_hsd.unzip_file', mock.MagicMock(side_effect=_new_unzip)), \ - mock.patch('satpy.readers.ahi_hsd.open', m, create=True): - in_fname = 'test_file.bz2' + with mock.patch("satpy.readers.ahi_hsd.np.fromfile", _custom_fromfile), \ + mock.patch("satpy.readers.ahi_hsd.unzip_file", mock.MagicMock(side_effect=_new_unzip)), \ + mock.patch("satpy.readers.ahi_hsd.open", m, create=True): + in_fname = "test_file.bz2" fh = _create_fake_file_handler(in_fname, fh_kwargs=fh_kwargs) yield fh @@ -655,14 +653,14 @@ def _custom_fromfile(*args, **kwargs): def _create_fake_file_handler(in_fname, filename_info=None, filetype_info=None, fh_kwargs=None): if filename_info is None: - filename_info = {'segment': 8, 'total_segments': 10} + filename_info = {"segment": 8, "total_segments": 10} if filetype_info is None: - filetype_info = {'file_type': 'hsd_b01'} + filetype_info = {"file_type": "hsd_b01"} if fh_kwargs is None: fh_kwargs = {} fh = AHIHSDFileHandler(in_fname, filename_info, filetype_info, **fh_kwargs) # Check that the filename is altered and 2 digit segment prefix added for bz2 format files assert in_fname != fh.filename - assert str(filename_info['segment']).zfill(2) == fh.filename[0:2] + assert str(filename_info["segment"]).zfill(2) == fh.filename[0:2] return fh diff --git a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py index e4ef6ec72f..05abef600b 100644 --- a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py +++ b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py @@ -35,65 +35,65 @@ class TestAHIGriddedArea(unittest.TestCase): def setUp(self): """Create fake data for testing.""" - self.FULLDISK_SIZES = {0.005: {'x_size': 24000, - 'y_size': 24000}, - 0.01: {'x_size': 12000, - 'y_size': 12000}, - 0.02: {'x_size': 6000, - 'y_size': 6000}} + self.FULLDISK_SIZES = {0.005: {"x_size": 24000, + "y_size": 24000}, + 0.01: {"x_size": 12000, + "y_size": 12000}, + 0.02: {"x_size": 6000, + "y_size": 6000}} self.AHI_FULLDISK_EXTENT = [85., -60., 205., 60.] 
@staticmethod - def make_fh(filetype, area='fld'): + def make_fh(filetype, area="fld"): """Create a test file handler.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): - fh = AHIGriddedFileHandler('somefile', - {'area': area}, - filetype_info={'file_type': filetype}) + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): + fh = AHIGriddedFileHandler("somefile", + {"area": area}, + filetype_info={"file_type": filetype}) return fh def test_low_res(self): """Check size of the low resolution (2km) grid.""" - tmp_fh = self.make_fh('tir.01') - self.assertEqual(self.FULLDISK_SIZES[0.02]['x_size'], tmp_fh.ncols) - self.assertEqual(self.FULLDISK_SIZES[0.02]['y_size'], tmp_fh.nlines) + tmp_fh = self.make_fh("tir.01") + assert self.FULLDISK_SIZES[0.02]["x_size"] == tmp_fh.ncols + assert self.FULLDISK_SIZES[0.02]["y_size"] == tmp_fh.nlines def test_med_res(self): """Check size of the low resolution (1km) grid.""" - tmp_fh = self.make_fh('vis.02') - self.assertEqual(self.FULLDISK_SIZES[0.01]['x_size'], tmp_fh.ncols) - self.assertEqual(self.FULLDISK_SIZES[0.01]['y_size'], tmp_fh.nlines) + tmp_fh = self.make_fh("vis.02") + assert self.FULLDISK_SIZES[0.01]["x_size"] == tmp_fh.ncols + assert self.FULLDISK_SIZES[0.01]["y_size"] == tmp_fh.nlines def test_hi_res(self): """Check size of the low resolution (0.5km) grid.""" - tmp_fh = self.make_fh('ext.01') - self.assertEqual(self.FULLDISK_SIZES[0.005]['x_size'], tmp_fh.ncols) - self.assertEqual(self.FULLDISK_SIZES[0.005]['y_size'], tmp_fh.nlines) + tmp_fh = self.make_fh("ext.01") + assert self.FULLDISK_SIZES[0.005]["x_size"] == tmp_fh.ncols + assert self.FULLDISK_SIZES[0.005]["y_size"] == tmp_fh.nlines def test_area_def(self): """Check that a valid full disk area is produced.""" - good_area = AreaDefinition('gridded_himawari', - 'A gridded Himawari area', - 'longlat', - 'EPSG:4326', - self.FULLDISK_SIZES[0.01]['x_size'], - self.FULLDISK_SIZES[0.01]['y_size'], + good_area = AreaDefinition("gridded_himawari", + "A gridded Himawari area", + "longlat", + "EPSG:4326", + self.FULLDISK_SIZES[0.01]["x_size"], + self.FULLDISK_SIZES[0.01]["y_size"], self.AHI_FULLDISK_EXTENT) - tmp_fh = self.make_fh('vis.01') + tmp_fh = self.make_fh("vis.01") tmp_fh.get_area_def(None) - self.assertEqual(tmp_fh.area, good_area) + assert tmp_fh.area == good_area def test_bad_area(self): """Ensure an error is raised for an usupported area.""" - tmp_fh = self.make_fh('ext.01') - tmp_fh.areaname = 'scanning' + tmp_fh = self.make_fh("ext.01") + tmp_fh.areaname = "scanning" with self.assertRaises(NotImplementedError): tmp_fh.get_area_def(None) with self.assertRaises(NotImplementedError): - self.make_fh('ext.01', area='scanning') + self.make_fh("ext.01", area="scanning") class TestAHIGriddedFileCalibration(unittest.TestCase): @@ -102,16 +102,16 @@ class TestAHIGriddedFileCalibration(unittest.TestCase): def setUp(self): """Create a test file handler.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): - in_fname = 'test_file' + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): + in_fname = "test_file" fh = AHIGriddedFileHandler(in_fname, - {'area': 'fld'}, - filetype_info={'file_type': 'tir.01'}) + {"area": "fld"}, + filetype_info={"file_type": "tir.01"}) self.fh = fh - @mock.patch('satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._get_luts') - @mock.patch('satpy.readers.ahi_l1b_gridded_bin.os.path.exists') - 
@mock.patch('satpy.readers.ahi_l1b_gridded_bin.np.loadtxt') + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._get_luts") + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.os.path.exists") + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.np.loadtxt") def test_calibrate(self, np_loadtxt, os_exist, get_luts): """Test the calibration modes of AHI using the LUTs.""" load_return = np.squeeze(np.dstack([np.arange(0, 2048, 1), @@ -128,26 +128,26 @@ def test_calibrate(self, np_loadtxt, os_exist, get_luts): os_exist.return_value = False # Check that the LUT download is called if we don't have the LUTS - self.fh.calibrate(in_data, 'reflectance') + self.fh.calibrate(in_data, "reflectance") get_luts.assert_called() os_exist.return_value = True # Ensure results equal if no calibration applied - out_data = self.fh.calibrate(in_data, 'counts') + out_data = self.fh.calibrate(in_data, "counts") np.testing.assert_equal(in_data, out_data) # Now ensure results equal if LUT calibration applied - out_data = self.fh.calibrate(in_data, 'reflectance') + out_data = self.fh.calibrate(in_data, "reflectance") np.testing.assert_allclose(refl_out, out_data) # Check that exception is raised if bad calibration is passed with self.assertRaises(NotImplementedError): - self.fh.calibrate(in_data, 'lasers') + self.fh.calibrate(in_data, "lasers") # Check that exception is raised if no file is present np_loadtxt.side_effect = FileNotFoundError with self.assertRaises(FileNotFoundError): - self.fh.calibrate(in_data, 'reflectance') + self.fh.calibrate(in_data, "reflectance") class TestAHIGriddedFileHandler(unittest.TestCase): @@ -155,44 +155,44 @@ class TestAHIGriddedFileHandler(unittest.TestCase): def new_unzip(fname): """Fake unzipping.""" - if fname[-3:] == 'bz2': + if fname[-3:] == "bz2": return fname[:-4] - @mock.patch('satpy.readers.ahi_l1b_gridded_bin.unzip_file', + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.unzip_file", mock.MagicMock(side_effect=new_unzip)) def setUp(self): """Create a test file handler.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): - in_fname = 'test_file.bz2' + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): + in_fname = "test_file.bz2" fh = AHIGriddedFileHandler(in_fname, - {'area': 'fld'}, - filetype_info={'file_type': 'tir.01'}) + {"area": "fld"}, + filetype_info={"file_type": "tir.01"}) # Check that the filename is altered for bz2 format files - self.assertNotEqual(in_fname, fh.filename) + assert in_fname != fh.filename self.fh = fh - key = {'calibration': 'counts', - 'name': 'vis.01'} - info = {'units': 'unitless', - 'standard_name': 'vis.01', - 'wavelength': 10.8, - 'resolution': 0.05} + key = {"calibration": "counts", + "name": "vis.01"} + info = {"units": "unitless", + "standard_name": "vis.01", + "wavelength": 10.8, + "resolution": 0.05} self.key = key self.info = info - @mock.patch('satpy.readers.ahi_l1b_gridded_bin.np.memmap') + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.np.memmap") def test_dataread(self, memmap): """Check that a dask array is returned from the read function.""" test_arr = np.zeros((10, 10)) memmap.return_value = test_arr m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): res = self.fh._read_data(mock.MagicMock()) np.testing.assert_allclose(res, da.from_array(test_arr)) - 
@mock.patch('satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._read_data') + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._read_data") def test_get_dataset(self, mocked_read): """Check that a good dataset is returned on request.""" m = mock.mock_open() @@ -200,17 +200,17 @@ def test_get_dataset(self, mocked_read): out_data = np.array([[100., 300., 500.], [800., 1500., 2040.]]) mocked_read.return_value = out_data - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): res = self.fh.get_dataset(self.key, self.info) mocked_read.assert_called() # Check output data is correct np.testing.assert_allclose(res.values, out_data) # Also check a couple of attributes - self.assertEqual(res.attrs['name'], self.key['name']) - self.assertEqual(res.attrs['wavelength'], self.info['wavelength']) + assert res.attrs["name"] == self.key["name"] + assert res.attrs["wavelength"] == self.info["wavelength"] - @mock.patch('os.path.exists', return_value=True) - @mock.patch('os.remove') + @mock.patch("os.path.exists", return_value=True) + @mock.patch("os.remove") def test_destructor(self, exist_patch, remove_patch): """Check that file handler deletes files if needed.""" del self.fh @@ -229,27 +229,27 @@ def mocked_ftp_dl(fname): with tarfile.open(fname, "w:gz") as tar_handle: for namer in AHI_LUT_NAMES: tmpf = os.path.join(tempfile.tempdir, namer) - with open(tmpf, 'w') as tmp_fid: + with open(tmpf, "w") as tmp_fid: tmp_fid.write("TEST\n") - tar_handle.add(tmpf, arcname='count2tbb_v102/'+namer) + tar_handle.add(tmpf, arcname="count2tbb_v102/"+namer) os.remove(tmpf) def setUp(self): """Create a test file handler.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): - in_fname = 'test_file' + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): + in_fname = "test_file" fh = AHIGriddedFileHandler(in_fname, - {'area': 'fld'}, - filetype_info={'file_type': 'tir.01'}) + {"area": "fld"}, + filetype_info={"file_type": "tir.01"}) self.fh = fh - key = {'calibration': 'counts', - 'name': 'vis.01'} - info = {'units': 'unitless', - 'standard_name': 'vis.01', - 'wavelength': 10.8, - 'resolution': 0.05} + key = {"calibration": "counts", + "name": "vis.01"} + info = {"units": "unitless", + "standard_name": "vis.01", + "wavelength": 10.8, + "resolution": 0.05} self.key = key self.info = info @@ -262,23 +262,22 @@ def tearDown(self): if os.path.isdir(self.fh.lut_dir): shutil.rmtree(self.fh.lut_dir) - @mock.patch('satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._download_luts', + @mock.patch("satpy.readers.ahi_l1b_gridded_bin.AHIGriddedFileHandler._download_luts", mock.MagicMock(side_effect=mocked_ftp_dl)) def test_get_luts(self): """Check that the function to download LUTs operates successfully.""" tempdir = tempfile.gettempdir() - print(self.fh.lut_dir) self.fh._get_luts() - self.assertFalse(os.path.exists(os.path.join(tempdir, 'count2tbb_v102/'))) + assert not os.path.exists(os.path.join(tempdir, "count2tbb_v102/")) for lut_name in AHI_LUT_NAMES: - self.assertTrue(os.path.isfile(os.path.join(self.fh.lut_dir, lut_name))) + assert os.path.isfile(os.path.join(self.fh.lut_dir, lut_name)) - @mock.patch('urllib.request.urlopen') - @mock.patch('shutil.copyfileobj') + @mock.patch("urllib.request.urlopen") + @mock.patch("shutil.copyfileobj") def test_download_luts(self, mock_dl, mock_shutil): """Test that the FTP library is called for 
downloading LUTS.""" m = mock.mock_open() - with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True): - self.fh._download_luts('/test_file') + with mock.patch("satpy.readers.ahi_l1b_gridded_bin.open", m, create=True): + self.fh._download_luts("/test_file") mock_dl.assert_called() mock_shutil.assert_called() diff --git a/satpy/tests/reader_tests/test_ahi_l2_nc.py b/satpy/tests/reader_tests/test_ahi_l2_nc.py index 39de4e1053..7d4050ecf0 100644 --- a/satpy/tests/reader_tests/test_ahi_l2_nc.py +++ b/satpy/tests/reader_tests/test_ahi_l2_nc.py @@ -18,7 +18,7 @@ start_time = datetime(2023, 8, 24, 5, 40, 21) end_time = datetime(2023, 8, 24, 5, 49, 40) -dimensions = {'Columns': 5500, 'Rows': 5500} +dimensions = {"Columns": 5500, "Rows": 5500} exp_ext = (-5499999.9012, -5499999.9012, 5499999.9012, 5499999.9012) @@ -30,12 +30,12 @@ } badarea_attrs = global_attrs.copy() -badarea_attrs['cdm_data_type'] = 'bad_area' +badarea_attrs["cdm_data_type"] = "bad_area" -def ahil2_filehandler(fname, platform='h09'): +def ahil2_filehandler(fname, platform="h09"): """Instantiate a Filehandler.""" - fileinfo = {'platform': platform} + fileinfo = {"platform": platform} filetype = None fh = HIML2NCFileHandler(fname, fileinfo, filetype) return fh @@ -45,9 +45,9 @@ def ahil2_filehandler(fname, platform='h09'): def himl2_filename(tmp_path_factory): """Create a fake himawari l2 file.""" fname = f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc' - ds = xr.Dataset({'CloudMask': (['Rows', 'Columns'], clmk_data)}, - coords={'Latitude': (['Rows', 'Columns'], lat_data), - 'Longitude': (['Rows', 'Columns'], lon_data)}, + ds = xr.Dataset({"CloudMask": (["Rows", "Columns"], clmk_data)}, + coords={"Latitude": (["Rows", "Columns"], lat_data), + "Longitude": (["Rows", "Columns"], lon_data)}, attrs=global_attrs) ds.to_netcdf(fname) return fname @@ -57,9 +57,9 @@ def himl2_filename(tmp_path_factory): def himl2_filename_bad(tmp_path_factory): """Create a fake himawari l2 file.""" fname = f'{tmp_path_factory.mktemp("data")}/AHI-CMSK_v1r1_h09_s202308240540213_e202308240549407_c202308240557548.nc' - ds = xr.Dataset({'CloudMask': (['Rows', 'Columns'], clmk_data)}, - coords={'Latitude': (['Rows', 'Columns'], lat_data), - 'Longitude': (['Rows', 'Columns'], lon_data)}, + ds = xr.Dataset({"CloudMask": (["Rows", "Columns"], clmk_data)}, + coords={"Latitude": (["Rows", "Columns"], lat_data), + "Longitude": (["Rows", "Columns"], lon_data)}, attrs=badarea_attrs) ds.to_netcdf(fname) @@ -75,35 +75,35 @@ def test_startend(himl2_filename): def test_ahi_l2_area_def(himl2_filename, caplog): """Test reader handles area definition correctly.""" - ps = '+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs' + ps = "+proj=geos +lon_0=140.7 +h=35785863 +x_0=0 +y_0=0 +a=6378137 +rf=298.257024882273 +units=m +no_defs +type=crs" # Check case where input data is correct size. fh = ahil2_filehandler(himl2_filename) clmk_id = make_dataid(name="cloudmask") area_def = fh.get_area_def(clmk_id) - assert area_def.width == dimensions['Columns'] - assert area_def.height == dimensions['Rows'] + assert area_def.width == dimensions["Columns"] + assert area_def.height == dimensions["Rows"] assert np.allclose(area_def.area_extent, exp_ext) assert area_def.proj4_string == ps # Check case where input data is incorrect size. 
- with pytest.raises(ValueError): - fh = ahil2_filehandler(himl2_filename) - fh.nlines = 3000 + fh = ahil2_filehandler(himl2_filename) + fh.nlines = 3000 + with pytest.raises(ValueError, match="Input L2 file is not a full disk Himawari scene..*"): fh.get_area_def(clmk_id) def test_bad_area_name(himl2_filename_bad): """Check case where area name is not correct.""" - global_attrs['cdm_data_type'] = 'bad_area' - with pytest.raises(ValueError): + global_attrs["cdm_data_type"] = "bad_area" + with pytest.raises(ValueError, match="File is not a full disk scene"): ahil2_filehandler(himl2_filename_bad) - global_attrs['cdm_data_type'] = 'Full Disk' + global_attrs["cdm_data_type"] = "Full Disk" def test_load_data(himl2_filename): """Test that data is loaded successfully.""" fh = ahil2_filehandler(himl2_filename) clmk_id = make_dataid(name="cloudmask") - clmk = fh.get_dataset(clmk_id, {'file_key': 'CloudMask'}) + clmk = fh.get_dataset(clmk_id, {"file_key": "CloudMask"}) assert np.allclose(clmk.data, clmk_data) diff --git a/satpy/tests/reader_tests/test_ami_l1b.py b/satpy/tests/reader_tests/test_ami_l1b.py index 50f6f2af03..cdbc4468c9 100644 --- a/satpy/tests/reader_tests/test_ami_l1b.py +++ b/satpy/tests/reader_tests/test_ami_l1b.py @@ -23,6 +23,7 @@ import dask.array as da import numpy as np import xarray as xr +from pytest import approx # noqa: PT013 class FakeDataset(object): @@ -56,7 +57,7 @@ def close(self): class TestAMIL1bNetCDFBase(unittest.TestCase): """Common setup for NC_ABI_L1B tests.""" - @mock.patch('satpy.readers.ami_l1b.xr') + @mock.patch("satpy.readers.ami_l1b.xr") def setUp(self, xr_, counts=None): """Create a fake dataset using the given counts data.""" from satpy.readers.ami_l1b import AMIL1bNetCDF @@ -66,35 +67,35 @@ def setUp(self, xr_, counts=None): rad_data = (rad_data + 1.) 
/ 0.5 rad_data = rad_data.astype(np.int16) counts = xr.DataArray( - da.from_array(rad_data, chunks='auto'), - dims=('y', 'x'), + da.from_array(rad_data, chunks="auto"), + dims=("y", "x"), attrs={ - 'channel_name': "VI006", - 'detector_side': 2, - 'number_of_total_pixels': 484000000, - 'number_of_error_pixels': 113892451, - 'max_pixel_value': 32768, - 'min_pixel_value': 6, - 'average_pixel_value': 8228.98770845248, - 'stddev_pixel_value': 13621.130386551, - 'number_of_total_bits_per_pixel': 16, - 'number_of_data_quality_flag_bits_per_pixel': 2, - 'number_of_valid_bits_per_pixel': 12, - 'data_quality_flag_meaning': + "channel_name": "VI006", + "detector_side": 2, + "number_of_total_pixels": 484000000, + "number_of_error_pixels": 113892451, + "max_pixel_value": 32768, + "min_pixel_value": 6, + "average_pixel_value": 8228.98770845248, + "stddev_pixel_value": 13621.130386551, + "number_of_total_bits_per_pixel": 16, + "number_of_data_quality_flag_bits_per_pixel": 2, + "number_of_valid_bits_per_pixel": 12, + "data_quality_flag_meaning": "0:good_pixel, 1:conditionally_usable_pixel, 2:out_of_scan_area_pixel, 3:error_pixel", - 'ground_sample_distance_ew': 1.4e-05, - 'ground_sample_distance_ns': 1.4e-05, + "ground_sample_distance_ew": 1.4e-05, + "ground_sample_distance_ns": 1.4e-05, } ) sc_position = xr.DataArray(0., attrs={ - 'sc_position_center_pixel': [-26113466.1974016, 33100139.1630508, 3943.75470244799], + "sc_position_center_pixel": [-26113466.1974016, 33100139.1630508, 3943.75470244799], }) xr_.open_dataset.return_value = FakeDataset( { - 'image_pixel_values': counts, - 'sc_position': sc_position, - 'gsics_coeff_intercept': [0.1859369], - 'gsics_coeff_slope': [0.9967594], + "image_pixel_values": counts, + "sc_position": sc_position, + "gsics_coeff_intercept": [0.1859369], + "gsics_coeff_slope": [0.9967594], }, { "satellite_name": "GK-2A", @@ -125,9 +126,9 @@ def setUp(self, xr_, counts=None): } ) - self.reader = AMIL1bNetCDF('filename', - {'platform_shortname': 'gk2a'}, - {'file_type': 'ir087'},) + self.reader = AMIL1bNetCDF("filename", + {"platform_shortname": "gk2a"}, + {"file_type": "ir087"},) class TestAMIL1bNetCDF(TestAMIL1bNetCDFBase): @@ -136,129 +137,127 @@ class TestAMIL1bNetCDF(TestAMIL1bNetCDFBase): def _check_orbital_parameters(self, orb_params): """Check that orbital parameters match expected values.""" exp_params = { - 'projection_altitude': 35785863.0, - 'projection_latitude': 0.0, - 'projection_longitude': 128.2, - 'satellite_actual_altitude': 35782654.56070405, - 'satellite_actual_latitude': 0.005364927, - 'satellite_actual_longitude': 128.2707, + "projection_altitude": 35785863.0, + "projection_latitude": 0.0, + "projection_longitude": 128.2, + "satellite_actual_altitude": 35782654.56070405, + "satellite_actual_latitude": 0.005364927, + "satellite_actual_longitude": 128.2707, } for key, val in exp_params.items(): - self.assertAlmostEqual(val, orb_params[key], places=3) + assert val == approx(orb_params[key], abs=1e-3) def test_filename_grouping(self): """Test that filenames are grouped properly.""" from satpy.readers import group_files filenames = [ - 'gk2a_ami_le1b_ir087_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_ir096_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_ir105_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_ir112_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_ir123_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_ir133_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_nr013_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_nr016_fd020ge_201909300300.nc', - 
'gk2a_ami_le1b_sw038_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_vi004_fd010ge_201909300300.nc', - 'gk2a_ami_le1b_vi005_fd010ge_201909300300.nc', - 'gk2a_ami_le1b_vi006_fd005ge_201909300300.nc', - 'gk2a_ami_le1b_vi008_fd010ge_201909300300.nc', - 'gk2a_ami_le1b_wv063_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_wv069_fd020ge_201909300300.nc', - 'gk2a_ami_le1b_wv073_fd020ge_201909300300.nc'] - groups = group_files(filenames, reader='ami_l1b') - self.assertEqual(len(groups), 1) - self.assertEqual(len(groups[0]['ami_l1b']), 16) + "gk2a_ami_le1b_ir087_fd020ge_201909300300.nc", + "gk2a_ami_le1b_ir096_fd020ge_201909300300.nc", + "gk2a_ami_le1b_ir105_fd020ge_201909300300.nc", + "gk2a_ami_le1b_ir112_fd020ge_201909300300.nc", + "gk2a_ami_le1b_ir123_fd020ge_201909300300.nc", + "gk2a_ami_le1b_ir133_fd020ge_201909300300.nc", + "gk2a_ami_le1b_nr013_fd020ge_201909300300.nc", + "gk2a_ami_le1b_nr016_fd020ge_201909300300.nc", + "gk2a_ami_le1b_sw038_fd020ge_201909300300.nc", + "gk2a_ami_le1b_vi004_fd010ge_201909300300.nc", + "gk2a_ami_le1b_vi005_fd010ge_201909300300.nc", + "gk2a_ami_le1b_vi006_fd005ge_201909300300.nc", + "gk2a_ami_le1b_vi008_fd010ge_201909300300.nc", + "gk2a_ami_le1b_wv063_fd020ge_201909300300.nc", + "gk2a_ami_le1b_wv069_fd020ge_201909300300.nc", + "gk2a_ami_le1b_wv073_fd020ge_201909300300.nc"] + groups = group_files(filenames, reader="ami_l1b") + assert len(groups) == 1 + assert len(groups[0]["ami_l1b"]) == 16 def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime - self.assertEqual(self.reader.start_time, - datetime(2019, 9, 30, 3, 0, 31, 957882)) - self.assertEqual(self.reader.end_time, - datetime(2019, 9, 30, 3, 9, 35, 606133)) + assert self.reader.start_time == datetime(2019, 9, 30, 3, 0, 31, 957882) + assert self.reader.end_time == datetime(2019, 9, 30, 3, 9, 35, 606133) def test_get_dataset(self): """Test gettting radiance data.""" from satpy.tests.utils import make_dataid - key = make_dataid(name='VI006', calibration='radiance') + key = make_dataid(name="VI006", calibration="radiance") res = self.reader.get_dataset(key, { - 'file_key': 'image_pixel_values', - 'standard_name': 'toa_outgoing_radiance_per_unit_wavelength', - 'units': 'W m-2 um-1 sr-1', + "file_key": "image_pixel_values", + "standard_name": "toa_outgoing_radiance_per_unit_wavelength", + "units": "W m-2 um-1 sr-1", }) - exp = {'calibration': 'radiance', - 'modifiers': (), - 'platform_name': 'GEO-KOMPSAT-2A', - 'sensor': 'ami', - 'units': 'W m-2 um-1 sr-1'} + exp = {"calibration": "radiance", + "modifiers": (), + "platform_name": "GEO-KOMPSAT-2A", + "sensor": "ami", + "units": "W m-2 um-1 sr-1"} for key, val in exp.items(): - self.assertEqual(val, res.attrs[key]) - self._check_orbital_parameters(res.attrs['orbital_parameters']) + assert val == res.attrs[key] + self._check_orbital_parameters(res.attrs["orbital_parameters"]) def test_bad_calibration(self): """Test that asking for a bad calibration fails.""" from satpy.tests.utils import make_dataid with self.assertRaises(ValueError): - ds_id = make_dataid(name='VI006', calibration='_bad_') - ds_info = {'file_key': 'image_pixel_values', - 'standard_name': 'toa_outgoing_radiance_per_unit_wavelength', - 'units': 'W m-2 um-1 sr-1', + ds_id = make_dataid(name="VI006", calibration="_bad_") + ds_info = {"file_key": "image_pixel_values", + "standard_name": "toa_outgoing_radiance_per_unit_wavelength", + "units": "W m-2 um-1 sr-1", } self.reader.get_dataset(ds_id, ds_info) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + 
@mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def(self, adef): """Test the area generation.""" self.reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - exp = {'a': 6378137.0, 'b': 6356752.3, 'h': 35785863.0, - 'lon_0': 128.2, 'proj': 'geos', 'units': 'm'} + exp = {"a": 6378137.0, "b": 6356752.3, "h": 35785863.0, + "lon_0": 128.2, "proj": "geos", "units": "m"} for key, val in exp.items(): - self.assertIn(key, call_args[3]) - self.assertAlmostEqual(val, call_args[3][key]) - self.assertEqual(call_args[4], self.reader.nc.attrs['number_of_columns']) - self.assertEqual(call_args[5], self.reader.nc.attrs['number_of_lines']) + assert key in call_args[3] + assert val == approx(call_args[3][key]) + assert call_args[4] == self.reader.nc.attrs["number_of_columns"] + assert call_args[5] == self.reader.nc.attrs["number_of_lines"] np.testing.assert_allclose(call_args[6], [-5511022.902, -5511022.902, 5511022.902, 5511022.902]) def test_get_dataset_vis(self): """Test get visible calibrated data.""" from satpy.tests.utils import make_dataid - key = make_dataid(name='VI006', calibration='reflectance') + key = make_dataid(name="VI006", calibration="reflectance") res = self.reader.get_dataset(key, { - 'file_key': 'image_pixel_values', - 'standard_name': 'toa_bidirectional_reflectance', - 'units': '%', + "file_key": "image_pixel_values", + "standard_name": "toa_bidirectional_reflectance", + "units": "%", }) - exp = {'calibration': 'reflectance', - 'modifiers': (), - 'platform_name': 'GEO-KOMPSAT-2A', - 'sensor': 'ami', - 'units': '%'} + exp = {"calibration": "reflectance", + "modifiers": (), + "platform_name": "GEO-KOMPSAT-2A", + "sensor": "ami", + "units": "%"} for key, val in exp.items(): - self.assertEqual(val, res.attrs[key]) - self._check_orbital_parameters(res.attrs['orbital_parameters']) + assert val == res.attrs[key] + self._check_orbital_parameters(res.attrs["orbital_parameters"]) def test_get_dataset_counts(self): """Test get counts data.""" from satpy.tests.utils import make_dataid - key = make_dataid(name='VI006', calibration='counts') + key = make_dataid(name="VI006", calibration="counts") res = self.reader.get_dataset(key, { - 'file_key': 'image_pixel_values', - 'standard_name': 'counts', - 'units': '1', + "file_key": "image_pixel_values", + "standard_name": "counts", + "units": "1", }) - exp = {'calibration': 'counts', - 'modifiers': (), - 'platform_name': 'GEO-KOMPSAT-2A', - 'sensor': 'ami', - 'units': '1'} + exp = {"calibration": "counts", + "modifiers": (), + "platform_name": "GEO-KOMPSAT-2A", + "sensor": "ami", + "units": "1"} for key, val in exp.items(): - self.assertEqual(val, res.attrs[key]) - self._check_orbital_parameters(res.attrs['orbital_parameters']) + assert val == res.attrs[key] + self._check_orbital_parameters(res.attrs["orbital_parameters"]) class TestAMIL1bNetCDFIRCal(TestAMIL1bNetCDFBase): @@ -270,53 +269,53 @@ def setUp(self): count_data = (np.arange(10).reshape((2, 5))) + 7000 count_data = count_data.astype(np.uint16) count = xr.DataArray( - da.from_array(count_data, chunks='auto'), - dims=('y', 'x'), + da.from_array(count_data, chunks="auto"), + dims=("y", "x"), attrs={ - 'channel_name': "IR087", - 'detector_side': 2, - 'number_of_total_pixels': 484000000, - 'number_of_error_pixels': 113892451, - 'max_pixel_value': 32768, - 'min_pixel_value': 6, - 'average_pixel_value': 8228.98770845248, - 'stddev_pixel_value': 13621.130386551, - 
'number_of_total_bits_per_pixel': 16, - 'number_of_data_quality_flag_bits_per_pixel': 2, - 'number_of_valid_bits_per_pixel': 13, - 'data_quality_flag_meaning': + "channel_name": "IR087", + "detector_side": 2, + "number_of_total_pixels": 484000000, + "number_of_error_pixels": 113892451, + "max_pixel_value": 32768, + "min_pixel_value": 6, + "average_pixel_value": 8228.98770845248, + "stddev_pixel_value": 13621.130386551, + "number_of_total_bits_per_pixel": 16, + "number_of_data_quality_flag_bits_per_pixel": 2, + "number_of_valid_bits_per_pixel": 13, + "data_quality_flag_meaning": "0:good_pixel, 1:conditionally_usable_pixel, 2:out_of_scan_area_pixel, 3:error_pixel", - 'ground_sample_distance_ew': 1.4e-05, - 'ground_sample_distance_ns': 1.4e-05, + "ground_sample_distance_ew": 1.4e-05, + "ground_sample_distance_ns": 1.4e-05, } ) - self.ds_id = make_dataid(name='IR087', wavelength=[8.415, 8.59, 8.765], - calibration='brightness_temperature') + self.ds_id = make_dataid(name="IR087", wavelength=[8.415, 8.59, 8.765], + calibration="brightness_temperature") self.ds_info = { - 'file_key': 'image_pixel_values', - 'wavelength': [8.415, 8.59, 8.765], - 'standard_name': 'toa_brightness_temperature', - 'units': 'K', + "file_key": "image_pixel_values", + "wavelength": [8.415, 8.59, 8.765], + "standard_name": "toa_brightness_temperature", + "units": "K", } super(TestAMIL1bNetCDFIRCal, self).setUp(counts=count) def test_default_calibrate(self): """Test default (pyspectral) IR calibration.""" from satpy.readers.ami_l1b import rad2temp - with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: + with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_called_once() expected = np.array([[238.34385135, 238.31443527, 238.28500087, 238.25554813, 238.22607701], [238.1965875, 238.16707956, 238.13755317, 238.10800829, 238.07844489]]) np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') + assert res.attrs["standard_name"] == "toa_brightness_temperature" def test_infile_calibrate(self): """Test IR calibration using in-file coefficients.""" from satpy.readers.ami_l1b import rad2temp - self.reader.calib_mode = 'FILE' - with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: + self.reader.calib_mode = "FILE" + with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_not_called() expected = np.array([[238.34385135, 238.31443527, 238.28500087, 238.25554813, 238.22607701], @@ -324,34 +323,34 @@ def test_infile_calibrate(self): # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.04) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') + assert res.attrs["standard_name"] == "toa_brightness_temperature" def test_gsics_radiance_corr(self): """Test IR radiance adjustment using in-file GSICS coefs.""" from satpy.readers.ami_l1b import rad2temp - self.reader.calib_mode = 'GSICS' + self.reader.calib_mode = "GSICS" expected = np.array([[238.036797, 238.007106, 237.977396, 237.947668, 237.91792], [237.888154, 237.85837, 237.828566, 237.798743, 237.768902]]) - with 
mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: + with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_not_called() # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.01) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') + assert res.attrs["standard_name"] == "toa_brightness_temperature" def test_user_radiance_corr(self): """Test IR radiance adjustment using user-supplied coefs.""" from satpy.readers.ami_l1b import rad2temp - self.reader.calib_mode = 'FILE' - self.reader.user_calibration = {'IR087': {'slope': 0.99669, - 'offset': 0.16907}} + self.reader.calib_mode = "FILE" + self.reader.user_calibration = {"IR087": {"slope": 0.99669, + "offset": 0.16907}} expected = np.array([[238.073713, 238.044043, 238.014354, 237.984647, 237.954921], [237.925176, 237.895413, 237.865631, 237.835829, 237.806009]]) - with mock.patch('satpy.readers.ami_l1b.rad2temp', wraps=rad2temp) as r2t_mock: + with mock.patch("satpy.readers.ami_l1b.rad2temp", wraps=rad2temp) as r2t_mock: res = self.reader.get_dataset(self.ds_id, self.ds_info) r2t_mock.assert_not_called() # file coefficients are pretty close, give some wiggle room np.testing.assert_allclose(res.data.compute(), expected, equal_nan=True, atol=0.01) # make sure the attributes from the file are in the data array - self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') + assert res.attrs["standard_name"] == "toa_brightness_temperature" diff --git a/satpy/tests/reader_tests/test_amsr2_l1b.py b/satpy/tests/reader_tests/test_amsr2_l1b.py index f3e9de538f..b627a53a0b 100644 --- a/satpy/tests/reader_tests/test_amsr2_l1b.py +++ b/satpy/tests/reader_tests/test_amsr2_l1b.py @@ -43,56 +43,56 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/PlatformShortName': 'GCOM-W1', - '/attr/SensorShortName': 'AMSR2', - '/attr/StartOrbitNumber': '22210', - '/attr/StopOrbitNumber': '22210', + "/attr/PlatformShortName": "GCOM-W1", + "/attr/SensorShortName": "AMSR2", + "/attr/StartOrbitNumber": "22210", + "/attr/StopOrbitNumber": "22210", } for bt_chan in [ - '(10.7GHz,H)', - '(10.7GHz,V)', - '(18.7GHz,H)', - '(18.7GHz,V)', - '(23.8GHz,H)', - '(23.8GHz,V)', - '(36.5GHz,H)', - '(36.5GHz,V)', - '(6.9GHz,H)', - '(6.9GHz,V)', - '(7.3GHz,H)', - '(7.3GHz,V)', - '(89.0GHz-A,H)', - '(89.0GHz-A,V)', - '(89.0GHz-B,H)', - '(89.0GHz-B,V)', + "(10.7GHz,H)", + "(10.7GHz,V)", + "(18.7GHz,H)", + "(18.7GHz,V)", + "(23.8GHz,H)", + "(23.8GHz,V)", + "(36.5GHz,H)", + "(36.5GHz,V)", + "(6.9GHz,H)", + "(6.9GHz,V)", + "(7.3GHz,H)", + "(7.3GHz,V)", + "(89.0GHz-A,H)", + "(89.0GHz-A,V)", + "(89.0GHz-B,H)", + "(89.0GHz-B,V)", ]: - k = 'Brightness Temperature {}'.format(bt_chan) + k = "Brightness Temperature {}".format(bt_chan) file_content[k] = DEFAULT_FILE_DATA[:, ::2] - file_content[k + '/shape'] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) - file_content[k + '/attr/UNIT'] = 'K' - file_content[k + '/attr/SCALE FACTOR'] = 0.01 + file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) + file_content[k + "/attr/UNIT"] = "K" + file_content[k + "/attr/SCALE FACTOR"] = 0.01 for bt_chan in [ - '(89.0GHz-A,H)', - '(89.0GHz-A,V)', - 
'(89.0GHz-B,H)', - '(89.0GHz-B,V)', + "(89.0GHz-A,H)", + "(89.0GHz-A,V)", + "(89.0GHz-B,H)", + "(89.0GHz-B,V)", ]: - k = 'Brightness Temperature {}'.format(bt_chan) + k = "Brightness Temperature {}".format(bt_chan) file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/UNIT'] = 'K' - file_content[k + '/attr/SCALE FACTOR'] = 0.01 - for nav_chan in ['89A', '89B']: - lon_k = 'Longitude of Observation Point for ' + nav_chan - lat_k = 'Latitude of Observation Point for ' + nav_chan + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/UNIT"] = "K" + file_content[k + "/attr/SCALE FACTOR"] = 0.01 + for nav_chan in ["89A", "89B"]: + lon_k = "Longitude of Observation Point for " + nav_chan + lat_k = "Latitude of Observation Point for " + nav_chan file_content[lon_k] = DEFAULT_LON_DATA - file_content[lon_k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[lon_k + '/attr/SCALE FACTOR'] = 1 - file_content[lon_k + '/attr/UNIT'] = 'deg' + file_content[lon_k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[lon_k + "/attr/SCALE FACTOR"] = 1 + file_content[lon_k + "/attr/UNIT"] = "deg" file_content[lat_k] = DEFAULT_LAT_DATA - file_content[lat_k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[lat_k + '/attr/SCALE FACTOR'] = 1 - file_content[lat_k + '/attr/UNIT'] = 'deg' + file_content[lat_k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[lat_k + "/attr/SCALE FACTOR"] = 1 + file_content[lat_k + "/attr/UNIT"] = "deg" convert_file_content_to_data_array(file_content) return file_content @@ -107,9 +107,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.amsr2_l1b import AMSR2L1BFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(AMSR2L1BFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(AMSR2L1BFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -122,71 +122,67 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5', + "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_basic(self): """Test loading of basic channels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5', + "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) ds = r.load([ - 'btemp_10.7v', - 'btemp_10.7h', - 'btemp_6.9v', - 'btemp_6.9h', - 'btemp_7.3v', - 'btemp_7.3h', - 'btemp_18.7v', - 'btemp_18.7h', - 'btemp_23.8v', - 'btemp_23.8h', - 'btemp_36.5v', - 'btemp_36.5h', + "btemp_10.7v", + "btemp_10.7h", + "btemp_6.9v", + "btemp_6.9h", + "btemp_7.3v", + "btemp_7.3h", + "btemp_18.7v", + "btemp_18.7h", + "btemp_23.8v", + "btemp_23.8h", + "btemp_36.5v", + "btemp_36.5h", ]) - self.assertEqual(len(ds), 12) + 
assert len(ds) == 12 for d in ds.values(): - self.assertEqual(d.attrs['calibration'], 'brightness_temperature') - self.assertTupleEqual(d.shape, (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1] // 2))) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertTupleEqual(d.attrs['area'].lons.shape, - (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2)) - self.assertTupleEqual(d.attrs['area'].lats.shape, - (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2)) - assert d.attrs['sensor'] == 'amsr2' - assert d.attrs['platform_name'] == 'GCOM-W1' + assert d.attrs["calibration"] == "brightness_temperature" + assert d.shape == (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1] // 2)) + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert d.attrs["area"].lons.shape == (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) + assert d.attrs["area"].lats.shape == (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2) + assert d.attrs["sensor"] == "amsr2" + assert d.attrs["platform_name"] == "GCOM-W1" def test_load_89ghz(self): """Test loading of 89GHz channels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5', + "GW1AM2_201607201808_128A_L1DLBTBR_1110110.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) ds = r.load([ - 'btemp_89.0av', - 'btemp_89.0ah', - 'btemp_89.0bv', - 'btemp_89.0bh', + "btemp_89.0av", + "btemp_89.0ah", + "btemp_89.0bv", + "btemp_89.0bh", ]) - self.assertEqual(len(ds), 4) + assert len(ds) == 4 for d in ds.values(): - self.assertEqual(d.attrs['calibration'], 'brightness_temperature') - self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertTupleEqual(d.attrs['area'].lons.shape, - DEFAULT_FILE_SHAPE) - self.assertTupleEqual(d.attrs['area'].lats.shape, - DEFAULT_FILE_SHAPE) + assert d.attrs["calibration"] == "brightness_temperature" + assert d.shape == DEFAULT_FILE_SHAPE + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert d.attrs["area"].lons.shape == DEFAULT_FILE_SHAPE + assert d.attrs["area"].lats.shape == DEFAULT_FILE_SHAPE diff --git a/satpy/tests/reader_tests/test_amsr2_l2.py b/satpy/tests/reader_tests/test_amsr2_l2.py index 711754c989..7199a619bc 100644 --- a/satpy/tests/reader_tests/test_amsr2_l2.py +++ b/satpy/tests/reader_tests/test_amsr2_l2.py @@ -42,29 +42,29 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/PlatformShortName': 'GCOM-W1', - '/attr/SensorShortName': 'AMSR2', - '/attr/StartOrbitNumber': '22210', - '/attr/StopOrbitNumber': '22210', + "/attr/PlatformShortName": "GCOM-W1", + "/attr/SensorShortName": "AMSR2", + "/attr/StartOrbitNumber": "22210", + "/attr/StopOrbitNumber": "22210", } - k = 'Geophysical Data' + k = "Geophysical Data" file_content[k] = DEFAULT_FILE_DATA[:, :] - file_content[k + '/shape'] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) - file_content[k + '/attr/UNIT'] = 'K' - file_content[k + '/attr/SCALE FACTOR'] = 1 + file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) + file_content[k + "/attr/UNIT"] = "K" + file_content[k + "/attr/SCALE FACTOR"] = 1 - k = 'Latitude of Observation Point' + k = "Latitude of Observation Point" file_content[k] = DEFAULT_FILE_DATA[:, :] - 
file_content[k + '/shape'] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) - file_content[k + '/attr/UNIT'] = 'deg' - file_content[k + '/attr/SCALE FACTOR'] = 1 - k = 'Longitude of Observation Point' + file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) + file_content[k + "/attr/UNIT"] = "deg" + file_content[k + "/attr/SCALE FACTOR"] = 1 + k = "Longitude of Observation Point" file_content[k] = DEFAULT_FILE_DATA[:, :] - file_content[k + '/shape'] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) - file_content[k + '/attr/UNIT'] = 'deg' - file_content[k + '/attr/SCALE FACTOR'] = 1 + file_content[k + "/shape"] = (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) + file_content[k + "/attr/UNIT"] = "deg" + file_content[k + "/attr/SCALE FACTOR"] = 1 - convert_file_content_to_data_array(file_content, dims=('dim_0', 'dim_1')) + convert_file_content_to_data_array(file_content, dims=("dim_0", "dim_1")) return file_content @@ -78,9 +78,9 @@ def setUp(self): from satpy._config import config_search_paths from satpy.readers.amsr2_l1b import AMSR2L1BFileHandler from satpy.readers.amsr2_l2 import AMSR2L2FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(AMSR2L2FileHandler, '__bases__', (FakeHDF5FileHandler2, + self.p = mock.patch.object(AMSR2L2FileHandler, "__bases__", (FakeHDF5FileHandler2, AMSR2L1BFileHandler)) self.fake_handler = self.p.start() self.p.is_local = True @@ -94,29 +94,27 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5', + "GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_basic(self): """Test loading of basic channels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5', + "GW1AM2_202004160129_195B_L2SNSSWLB3300300.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - ds = r.load(['ssw']) - self.assertEqual(len(ds), 1) + ds = r.load(["ssw"]) + assert len(ds) == 1 for d in ds.values(): - self.assertTupleEqual(d.shape, (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1]))) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertTupleEqual(d.attrs['area'].lons.shape, - (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1])) - self.assertTupleEqual(d.attrs['area'].lats.shape, - (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1])) + assert d.shape == (DEFAULT_FILE_SHAPE[0], int(DEFAULT_FILE_SHAPE[1])) + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert d.attrs["area"].lons.shape == (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) + assert d.attrs["area"].lats.shape == (DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]) diff --git a/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py b/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py index ac271e7206..2f1b3ad7b0 100644 --- a/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py +++ b/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py @@ -47,10 
+47,10 @@ def _get_shared_global_attrs(filename): attrs = { - 'time_coverage_start': '2020-08-12T05:58:31.0Z', - 'time_coverage_end': '2020-08-12T06:07:01.0Z', - 'platform_name': 'GCOM-W1', - 'instrument_name': 'AMSR2', + "time_coverage_start": "2020-08-12T05:58:31.0Z", + "time_coverage_end": "2020-08-12T06:07:01.0Z", + "platform_name": "GCOM-W1", + "instrument_name": "AMSR2", } return attrs @@ -58,43 +58,43 @@ def _get_shared_global_attrs(filename): def _create_two_res_gaasp_dataset(filename): """Represent files with two resolution of variables in them (ex. OCEAN).""" lon_var_hi = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_hi_rez_FOVs'), - attrs={'standard_name': 'longitude'}) + dims=("Number_of_Scans", "Number_of_hi_rez_FOVs"), + attrs={"standard_name": "longitude"}) lat_var_hi = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_hi_rez_FOVs'), - attrs={'standard_name': 'latitude'}) + dims=("Number_of_Scans", "Number_of_hi_rez_FOVs"), + attrs={"standard_name": "latitude"}) lon_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'standard_name': 'longitude'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"standard_name": "longitude"}) lat_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'standard_name': 'latitude'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"standard_name": "latitude"}) swath_var1 = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_hi_rez_FOVs'), - coords={'some_longitude_hi': lon_var_hi, 'some_latitude_hi': lat_var_hi}, - attrs={'_FillValue': -9999., - 'scale_factor': 0.5, 'add_offset': 2.0}) + dims=("Number_of_Scans", "Number_of_hi_rez_FOVs"), + coords={"some_longitude_hi": lon_var_hi, "some_latitude_hi": lat_var_hi}, + attrs={"_FillValue": -9999., + "scale_factor": 0.5, "add_offset": 2.0}) swath_var2 = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - coords={'some_longitude_lo': lon_var_lo, 'some_latitude_lo': lat_var_lo}, - attrs={'_FillValue': -9999.}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + coords={"some_longitude_lo": lon_var_lo, "some_latitude_lo": lat_var_lo}, + attrs={"_FillValue": -9999.}) swath_int_var = xr.DataArray(da.zeros((10, 10), dtype=np.uint16), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'_FillValue': 100, 'comment': 'Some comment'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"_FillValue": 100, "comment": "Some comment"}) not_xy_dim_var = xr.DataArray(da.zeros((10, 5), dtype=np.float32), - dims=('Number_of_Scans', 'Time_Dimension')) + dims=("Number_of_Scans", "Time_Dimension")) time_var = xr.DataArray(da.zeros((5,), dtype=np.float32), - dims=('Time_Dimension',)) + dims=("Time_Dimension",)) ds_vars = { - 'swath_var_hi': swath_var1, - 'swath_var_low': swath_var2, - 'swath_var_low_int': swath_int_var, - 'some_longitude_hi': lon_var_hi, - 'some_latitude_hi': lat_var_hi, - 'some_longitude_lo': lon_var_lo, - 'some_latitude_lo': lat_var_lo, - 'not_xy_dim_var': not_xy_dim_var, - 'time_var': time_var, + "swath_var_hi": swath_var1, + "swath_var_low": swath_var2, + "swath_var_low_int": swath_int_var, + "some_longitude_hi": lon_var_hi, + "some_latitude_hi": lat_var_hi, + "some_longitude_lo": lon_var_lo, + "some_latitude_lo": lat_var_lo, + 
"not_xy_dim_var": not_xy_dim_var, + "time_var": time_var, } attrs = _get_shared_global_attrs(filename) ds = xr.Dataset(ds_vars, attrs=attrs) @@ -104,22 +104,22 @@ def _create_two_res_gaasp_dataset(filename): def _create_gridded_gaasp_dataset(filename): """Represent files with gridded products.""" grid_var = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Y_Dimension', 'Number_of_X_Dimension'), + dims=("Number_of_Y_Dimension", "Number_of_X_Dimension"), attrs={ - '_FillValue': -9999., - 'scale_factor': 0.5, 'add_offset': 2.0 + "_FillValue": -9999., + "scale_factor": 0.5, "add_offset": 2.0 }) latency_var = xr.DataArray(da.zeros((10, 10), dtype=np.timedelta64), - dims=('Number_of_Y_Dimension', 'Number_of_X_Dimension'), + dims=("Number_of_Y_Dimension", "Number_of_X_Dimension"), attrs={ - '_FillValue': -9999, + "_FillValue": -9999, }) time_var = xr.DataArray(da.zeros((5,), dtype=np.float32), - dims=('Time_Dimension',)) + dims=("Time_Dimension",)) ds_vars = { - 'grid_var': grid_var, - 'latency_var': latency_var, - 'time_var': time_var, + "grid_var": grid_var, + "latency_var": latency_var, + "time_var": time_var, } attrs = _get_shared_global_attrs(filename) return xr.Dataset(ds_vars, attrs=attrs) @@ -128,29 +128,29 @@ def _create_gridded_gaasp_dataset(filename): def _create_one_res_gaasp_dataset(filename): """Represent files with one resolution of variables in them (ex. SOIL).""" lon_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'standard_name': 'longitude'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"standard_name": "longitude"}) lat_var_lo = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'standard_name': 'latitude'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"standard_name": "latitude"}) swath_var2 = xr.DataArray(da.zeros((10, 10), dtype=np.float32), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - coords={'some_longitude_lo': lon_var_lo, 'some_latitude_lo': lat_var_lo}, + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + coords={"some_longitude_lo": lon_var_lo, "some_latitude_lo": lat_var_lo}, attrs={ - '_FillValue': -9999., - 'scale_factor': 0.5, 'add_offset': 2.0 + "_FillValue": -9999., + "scale_factor": 0.5, "add_offset": 2.0 }) swath_int_var = xr.DataArray(da.zeros((10, 10), dtype=np.uint16), - dims=('Number_of_Scans', 'Number_of_low_rez_FOVs'), - attrs={'_FillValue': 100, 'comment': 'Some comment'}) + dims=("Number_of_Scans", "Number_of_low_rez_FOVs"), + attrs={"_FillValue": 100, "comment": "Some comment"}) time_var = xr.DataArray(da.zeros((5,), dtype=np.float32), - dims=('Time_Dimension',)) + dims=("Time_Dimension",)) ds_vars = { - 'swath_var': swath_var2, - 'swath_var_int': swath_int_var, - 'some_longitude_lo': lon_var_lo, - 'some_latitude_lo': lat_var_lo, - 'time_var': time_var, + "swath_var": swath_var2, + "swath_var_int": swath_int_var, + "some_longitude_lo": lon_var_lo, + "some_latitude_lo": lat_var_lo, + "time_var": time_var, } attrs = _get_shared_global_attrs(filename) return xr.Dataset(ds_vars, attrs=attrs) @@ -168,12 +168,12 @@ def fake_open_dataset(filename, **kwargs): class TestGAASPReader: """Tests for the GAASP reader.""" - yaml_file = 'amsr2_l2_gaasp.yaml' + yaml_file = "amsr2_l2_gaasp.yaml" def setup_method(self): """Wrap pygrib to read fake data.""" from satpy._config import config_search_paths - self.reader_configs = 
config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) @pytest.mark.parametrize( ("filenames", "expected_loadables"), @@ -191,7 +191,7 @@ def setup_method(self): def test_reader_creation(self, filenames, expected_loadables): """Test basic initialization.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.amsr2_l2_gaasp.xr.open_dataset') as od: + with mock.patch("satpy.readers.amsr2_l2_gaasp.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -203,26 +203,26 @@ def test_reader_creation(self, filenames, expected_loadables): @pytest.mark.parametrize( ("filenames", "expected_datasets"), [ - (EXAMPLE_FILENAMES, ['swath_var_hi', 'swath_var_low', - 'swath_var_low_int', 'swath_var', - 'swath_var_int', - 'grid_var_NH', 'grid_var_SH', - 'latency_var_NH', 'latency_var_SH']), - ([MBT_FILENAME], ['swath_var_hi', 'swath_var_low', - 'swath_var_low_int']), - ([PRECIP_FILENAME], ['swath_var_hi', 'swath_var_low', - 'swath_var_low_int']), - ([OCEAN_FILENAME], ['swath_var_hi', 'swath_var_low', - 'swath_var_low_int']), - ([SEAICE_NH_FILENAME], ['grid_var_NH', 'latency_var_NH']), - ([SEAICE_SH_FILENAME], ['grid_var_SH', 'latency_var_SH']), - ([SNOW_FILENAME], ['swath_var', 'swath_var_int']), - ([SOIL_FILENAME], ['swath_var', 'swath_var_int']), + (EXAMPLE_FILENAMES, ["swath_var_hi", "swath_var_low", + "swath_var_low_int", "swath_var", + "swath_var_int", + "grid_var_NH", "grid_var_SH", + "latency_var_NH", "latency_var_SH"]), + ([MBT_FILENAME], ["swath_var_hi", "swath_var_low", + "swath_var_low_int"]), + ([PRECIP_FILENAME], ["swath_var_hi", "swath_var_low", + "swath_var_low_int"]), + ([OCEAN_FILENAME], ["swath_var_hi", "swath_var_low", + "swath_var_low_int"]), + ([SEAICE_NH_FILENAME], ["grid_var_NH", "latency_var_NH"]), + ([SEAICE_SH_FILENAME], ["grid_var_SH", "latency_var_SH"]), + ([SNOW_FILENAME], ["swath_var", "swath_var_int"]), + ([SOIL_FILENAME], ["swath_var", "swath_var_int"]), ]) def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.amsr2_l2_gaasp.xr.open_dataset') as od: + with mock.patch("satpy.readers.amsr2_l2_gaasp.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -230,24 +230,24 @@ def test_available_datasets(self, filenames, expected_datasets): avails = list(r.available_dataset_names) for var_name in expected_datasets: assert var_name in avails - assert 'not_xy_dim_var' not in expected_datasets + assert "not_xy_dim_var" not in expected_datasets @staticmethod def _check_area(data_id, data_arr): from pyresample.geometry import AreaDefinition, SwathDefinition - area = data_arr.attrs['area'] - if 'grid_var' in data_id['name'] or 'latency_var' in data_id['name']: + area = data_arr.attrs["area"] + if "grid_var" in data_id["name"] or "latency_var" in data_id["name"]: assert isinstance(area, AreaDefinition) else: assert isinstance(area, SwathDefinition) @staticmethod def _check_fill(data_id, data_arr): - if 'int' in data_id['name']: - assert data_arr.attrs['_FillValue'] == 100 + if "int" in data_id["name"]: + assert data_arr.attrs["_FillValue"] == 100 assert np.issubdtype(data_arr.dtype, np.integer) else: - assert '_FillValue' not in data_arr.attrs + assert 
"_FillValue" not in data_arr.attrs if np.issubdtype(data_arr.dtype, np.floating): # we started with float32, it should stay that way assert data_arr.dtype.type == np.float32 @@ -255,33 +255,33 @@ def _check_fill(data_id, data_arr): @staticmethod def _check_attrs(data_arr): attrs = data_arr.attrs - assert 'scale_factor' not in attrs - assert 'add_offset' not in attrs - assert attrs['platform_name'] == 'GCOM-W1' - assert attrs['sensor'] == 'amsr2' - assert attrs['start_time'] == datetime(2020, 8, 12, 5, 58, 31) - assert attrs['end_time'] == datetime(2020, 8, 12, 6, 7, 1) + assert "scale_factor" not in attrs + assert "add_offset" not in attrs + assert attrs["platform_name"] == "GCOM-W1" + assert attrs["sensor"] == "amsr2" + assert attrs["start_time"] == datetime(2020, 8, 12, 5, 58, 31) + assert attrs["end_time"] == datetime(2020, 8, 12, 6, 7, 1) @pytest.mark.parametrize( ("filenames", "loadable_ids"), [ - (EXAMPLE_FILENAMES, ['swath_var_hi', 'swath_var_low', - 'swath_var_low_int', 'swath_var', - 'swath_var_int', - 'grid_var_NH', 'grid_var_SH', - 'latency_var_NH', 'latency_var_SH']), - ([MBT_FILENAME], ['swath_var_hi', 'swath_var_low', 'swath_var_low_int']), - ([PRECIP_FILENAME], ['swath_var_hi', 'swath_var_low', 'swath_var_low_int']), - ([OCEAN_FILENAME], ['swath_var_hi', 'swath_var_low', 'swath_var_low_int']), - ([SEAICE_NH_FILENAME], ['grid_var_NH', 'latency_var_NH']), - ([SEAICE_SH_FILENAME], ['grid_var_SH', 'latency_var_SH']), - ([SNOW_FILENAME], ['swath_var', 'swath_var_int']), - ([SOIL_FILENAME], ['swath_var', 'swath_var_int']), + (EXAMPLE_FILENAMES, ["swath_var_hi", "swath_var_low", + "swath_var_low_int", "swath_var", + "swath_var_int", + "grid_var_NH", "grid_var_SH", + "latency_var_NH", "latency_var_SH"]), + ([MBT_FILENAME], ["swath_var_hi", "swath_var_low", "swath_var_low_int"]), + ([PRECIP_FILENAME], ["swath_var_hi", "swath_var_low", "swath_var_low_int"]), + ([OCEAN_FILENAME], ["swath_var_hi", "swath_var_low", "swath_var_low_int"]), + ([SEAICE_NH_FILENAME], ["grid_var_NH", "latency_var_NH"]), + ([SEAICE_SH_FILENAME], ["grid_var_SH", "latency_var_SH"]), + ([SNOW_FILENAME], ["swath_var", "swath_var_int"]), + ([SOIL_FILENAME], ["swath_var", "swath_var_int"]), ]) def test_basic_load(self, filenames, loadable_ids): """Test that variables are loaded properly.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.amsr2_l2_gaasp.xr.open_dataset') as od: + with mock.patch("satpy.readers.amsr2_l2_gaasp.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) diff --git a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py index a65d0638f5..07ed218e72 100644 --- a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py +++ b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py @@ -42,42 +42,42 @@ def create_message(): surfaceSoilMoisture = np.round(rstate.rand(samples)*100, 1) surfaceSoilMoisture[0] = -1e+100 retmsg = { - 'inputDelayedDescriptorReplicationFactor': [8], - 'edition': 4, - 'masterTableNumber': 0, - 'bufrHeaderCentre': 254, - 'bufrHeaderSubCentre': 0, - 'updateSequenceNumber': 0, - 'dataCategory': 12, - 'internationalDataSubCategory': 255, - 'dataSubCategory': 190, - 'masterTablesVersionNumber': 13, - 'localTablesVersionNumber': 0, - 'typicalYear': 2020, - 'typicalMonth': 12, - 'typicalDay': 21, - 'typicalHour': 9, - 'typicalMinute': 33, - 'typicalSecond': 0, - 'numberOfSubsets': samples, - 
'observedData': 1, - 'compressedData': 1, - 'unexpandedDescriptors': 312061, - 'centre': 254, - 'subCentre': 0, - '#1#softwareIdentification': 1000, - 'satelliteIdentifier': 4, - 'satelliteInstruments': 190, - 'year': 2020, - 'month': 12, - 'day': 21, - 'hour': 9, - 'minute': 33, - 'second': np.linspace(0, 59, samples), - 'latitude': lat, - 'longitude': lon, - 'surfaceSoilMoisture': surfaceSoilMoisture, - 'soilMoistureQuality': np.zeros(samples), + "inputDelayedDescriptorReplicationFactor": [8], + "edition": 4, + "masterTableNumber": 0, + "bufrHeaderCentre": 254, + "bufrHeaderSubCentre": 0, + "updateSequenceNumber": 0, + "dataCategory": 12, + "internationalDataSubCategory": 255, + "dataSubCategory": 190, + "masterTablesVersionNumber": 13, + "localTablesVersionNumber": 0, + "typicalYear": 2020, + "typicalMonth": 12, + "typicalDay": 21, + "typicalHour": 9, + "typicalMinute": 33, + "typicalSecond": 0, + "numberOfSubsets": samples, + "observedData": 1, + "compressedData": 1, + "unexpandedDescriptors": 312061, + "centre": 254, + "subCentre": 0, + "#1#softwareIdentification": 1000, + "satelliteIdentifier": 4, + "satelliteInstruments": 190, + "year": 2020, + "month": 12, + "day": 21, + "hour": 9, + "minute": 33, + "second": np.linspace(0, 59, samples), + "latitude": lat, + "longitude": lon, + "surfaceSoilMoisture": surfaceSoilMoisture, + "soilMoistureQuality": np.zeros(samples), } return retmsg @@ -85,22 +85,22 @@ def create_message(): MSG = create_message() # the notional filename that would contain the above test message data -FILENAME = 'W_XX-EUMETSAT-TEST,SOUNDING+SATELLITE,METOPA+ASCAT_C_EUMC_20201221093300_73545_eps_o_125_ssm_l2.bin' +FILENAME = "W_XX-EUMETSAT-TEST,SOUNDING+SATELLITE,METOPA+ASCAT_C_EUMC_20201221093300_73545_eps_o_125_ssm_l2.bin" # the information that would be extracted from the above filename according to the pattern in the .yaml FILENAME_INFO = { - 'reception_location': 'TEST', - 'platform': 'METOPA', - 'instrument': 'ASCAT', - 'start_time': '20201221093300', - 'perigee': '73545', - 'species': '125_ssm', - 'level': 'l2' + "reception_location": "TEST", + "platform": "METOPA", + "instrument": "ASCAT", + "start_time": "20201221093300", + "perigee": "73545", + "species": "125_ssm", + "level": "l2" } # file type info for the above file that is defined in the .yaml FILETYPE_INFO = { - 'file_type': 'ascat_l2_soilmoisture_bufr', - 'file_reader': 'AscatSoilMoistureBufr' + "file_type": "ascat_l2_soilmoisture_bufr", + "file_reader": "AscatSoilMoistureBufr" } @@ -110,14 +110,14 @@ def save_test_data(path): filepath = os.path.join(path, FILENAME) with open(filepath, "wb") as f: for m in [MSG]: - buf = ec.codes_bufr_new_from_samples('BUFR4_local_satellite') + buf = ec.codes_bufr_new_from_samples("BUFR4_local_satellite") for key in m: val = m[key] if np.isscalar(val): ec.codes_set(buf, key, val) else: ec.codes_set_array(buf, key, val) - ec.codes_set(buf, 'pack', 1) + ec.codes_set(buf, "pack", 1) ec.codes_write(buf, f) ec.codes_release(buf) return filepath @@ -145,43 +145,43 @@ def tearDown(self): except OSError: pass - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene(self): """Test scene creation.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='ascat_l2_soilmoisture_bufr', filenames=[fname]) - self.assertTrue('scatterometer' in scn.sensor_names) - self.assertTrue(datetime(2020, 12, 21, 9, 33, 0) == 
scn.start_time) - self.assertTrue(datetime(2020, 12, 21, 9, 33, 59) == scn.end_time) + scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) + assert "scatterometer" in scn.sensor_names + assert datetime(2020, 12, 21, 9, 33, 0) == scn.start_time + assert datetime(2020, 12, 21, 9, 33, 59) == scn.end_time - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_load_available_datasets(self): """Test that all datasets are available.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='ascat_l2_soilmoisture_bufr', filenames=[fname]) - self.assertTrue('surface_soil_moisture' in scn.available_dataset_names()) + scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) + assert "surface_soil_moisture" in scn.available_dataset_names() scn.load(scn.available_dataset_names()) loaded = [dataset.name for dataset in scn] - self.assertTrue(sorted(loaded) == sorted(scn.available_dataset_names())) + assert sorted(loaded) == sorted(scn.available_dataset_names()) - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_dataset_values(self): """Test loading data.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='ascat_l2_soilmoisture_bufr', filenames=[fname]) + scn = Scene(reader="ascat_l2_soilmoisture_bufr", filenames=[fname]) for name in scn.available_dataset_names(): scn.load([name]) loaded_values = scn[name].values - fill_value = scn[name].attrs['fill_value'] + fill_value = scn[name].attrs["fill_value"] # replace nans in data loaded from file with the fill value defined in the .yaml # to make them comparable loaded_values_nan_filled = np.nan_to_num(loaded_values, nan=fill_value) - key = scn[name].attrs['key'] + key = scn[name].attrs["key"] original_values = MSG[key] # this makes each assertion below a separate test from unittest's point of view # (note: if all subtests pass, they will count as one test) with self.subTest(msg="Test failed for dataset: "+name): - self.assertTrue(np.allclose(original_values, loaded_values_nan_filled)) + assert np.allclose(original_values, loaded_values_nan_filled) diff --git a/satpy/tests/reader_tests/test_atms_l1b_nc.py b/satpy/tests/reader_tests/test_atms_l1b_nc.py index eca5454307..6b27081ed9 100644 --- a/satpy/tests/reader_tests/test_atms_l1b_nc.py +++ b/satpy/tests/reader_tests/test_atms_l1b_nc.py @@ -27,7 +27,7 @@ # - tmp_path -@pytest.fixture +@pytest.fixture() def reader(l1b_file): """Return reader of ATMS level1b data.""" return AtmsL1bNCFileHandler( @@ -37,15 +37,15 @@ def reader(l1b_file): ) -@pytest.fixture +@pytest.fixture() def l1b_file(tmp_path, atms_fake_dataset): """Return file path to level1b file.""" l1b_file_path = tmp_path / "test_file_atms_l1b.nc" atms_fake_dataset.to_netcdf(l1b_file_path) - yield l1b_file_path + return l1b_file_path -@pytest.fixture +@pytest.fixture() def atms_fake_dataset(): """Return fake ATMS dataset.""" atrack = 2 @@ -99,20 +99,20 @@ def test_antenna_temperature(self, reader, atms_fake_dataset): atms_fake_dataset.antenna_temp.values, ) - @pytest.mark.parametrize("param,expect", ( + @pytest.mark.parametrize(("param", "expect"), [ ("start_time", datetime(2000, 1, 2, 3, 4, 5)), ("end_time", datetime(2000, 1, 2, 4, 5, 6)), ("platform_name", "JPSS-1"), 
("sensor", "ATMS"), - )) + ]) def test_attrs(self, reader, param, expect): """Test attributes.""" assert reader.attrs[param] == expect - @pytest.mark.parametrize("dims", ( + @pytest.mark.parametrize("dims", [ ("xtrack", "atrack"), ("x", "y"), - )) + ]) def test_standardize_dims(self, reader, dims): """Test standardize dims.""" data = xr.DataArray( @@ -134,7 +134,7 @@ def test_drop_coords(self, reader): data = reader._drop_coords(data) assert coords not in data.coords - @pytest.mark.parametrize("param,expect", ( + @pytest.mark.parametrize(("param", "expect"), [ ("start_time", datetime(2000, 1, 2, 3, 4, 5)), ("end_time", datetime(2000, 1, 2, 4, 5, 6)), ("platform_name", "JPSS-1"), @@ -142,7 +142,7 @@ def test_drop_coords(self, reader): ("creation_time", datetime(2020, 1, 2, 3, 4, 5)), ("type", "test_data"), ("name", "test"), - )) + ]) def test_merge_attributes(self, reader, param, expect): """Test merge attributes.""" data = xr.DataArray( @@ -154,10 +154,10 @@ def test_merge_attributes(self, reader, param, expect): data = reader._merge_attributes(data, dataset_info) assert data.attrs[param] == expect - @pytest.mark.parametrize("param,expect", ( + @pytest.mark.parametrize(("param", "expect"), [ ("1", 100.), ("sat_azi", 3.), - )) + ]) def test_select_dataset(self, reader, param, expect): """Test select dataset.""" np.testing.assert_array_equal( diff --git a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py index 96fb4d7305..8971c2d933 100644 --- a/satpy/tests/reader_tests/test_atms_sdr_hdf5.py +++ b/satpy/tests/reader_tests/test_atms_sdr_hdf5.py @@ -53,14 +53,14 @@ def __init__(self, filename, filename_info, filetype_info, include_factors=True) @staticmethod def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans): - start_time = filename_info['start_time'] - end_time = filename_info['end_time'].replace(year=start_time.year, + start_time = filename_info["start_time"] + end_time = filename_info["end_time"].replace(year=start_time.year, month=start_time.month, day=start_time.day) - begin_date = start_time.strftime('%Y%m%d') - begin_time = start_time.strftime('%H%M%S.%fZ') - ending_date = end_time.strftime('%Y%m%d') - ending_time = end_time.strftime('%H%M%S.%fZ') + begin_date = start_time.strftime("%Y%m%d") + begin_time = start_time.strftime("%H%M%S.%fZ") + ending_date = end_time.strftime("%Y%m%d") + ending_time = end_time.strftime("%H%M%S.%fZ") new_file_content = { "{prefix2}/attr/AggregateNumberGranules": num_grans, "{prefix2}/attr/AggregateBeginningDate": begin_date, @@ -69,8 +69,8 @@ def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans): "{prefix2}/attr/AggregateEndingTime": ending_time, "{prefix2}/attr/G-Ring_Longitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/G-Ring_Latitude": np.array([0.0, 0.1, 0.2, 0.3]), - "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info['orbit']), - "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info['orbit']), + "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info["orbit"]), + "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info["orbit"]), "{prefix1}/attr/Instrument_Short_Name": "ATMS", "/attr/Platform_Short_Name": "J01", } @@ -82,13 +82,13 @@ def _add_granule_specific_info_to_file_content(self, file_content, dataset_group lats_lists = self._get_per_granule_lats() file_content["{prefix3}/NumberOfScans"] = np.array([1] * num_granules) for granule_idx in range(num_granules): 
- prefix_gran = '{prefix}/{dataset_group}_Gran_{idx}'.format(prefix=gran_group_prefix, + prefix_gran = "{prefix}/{dataset_group}_Gran_{idx}".format(prefix=gran_group_prefix, dataset_group=dataset_group, idx=granule_idx) num_scans = num_scans_per_granule[granule_idx] - file_content[prefix_gran + '/attr/N_Number_Of_Scans'] = num_scans - file_content[prefix_gran + '/attr/G-Ring_Longitude'] = lons_lists[granule_idx] - file_content[prefix_gran + '/attr/G-Ring_Latitude'] = lats_lists[granule_idx] + file_content[prefix_gran + "/attr/N_Number_Of_Scans"] = num_scans + file_content[prefix_gran + "/attr/G-Ring_Longitude"] = lons_lists[granule_idx] + file_content[prefix_gran + "/attr/G-Ring_Latitude"] = lats_lists[granule_idx] @staticmethod def _get_per_granule_lons(): @@ -152,7 +152,7 @@ def _add_data_info_to_file_content(self, file_content, filename, data_var_prefix # ATMS SDR files always produce data with 12 scans per granule even if there are less? FIXME! total_rows = DEFAULT_FILE_SHAPE[0] * 12 * num_grans new_shape = (total_rows, DEFAULT_FILE_SHAPE[1], self._num_of_bands) - key = 'BrightnessTemperature' + key = "BrightnessTemperature" key = data_var_prefix + "/" + key file_content[key] = np.repeat(DEFAULT_FILE_DATA.copy(), 12 * num_grans, axis=0) file_content[key] = np.repeat(file_content[key][:, :, np.newaxis], self._num_of_bands, axis=2) @@ -181,10 +181,10 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi file_content[k] = np.repeat([file_content[k]], total_rows, axis=0) file_content[k + "/shape"] = new_shape - angles = ['SolarZenithAngle', - 'SolarAzimuthAngle', - 'SatelliteZenithAngle', - 'SatelliteAzimuthAngle'] + angles = ["SolarZenithAngle", + "SolarAzimuthAngle", + "SatelliteZenithAngle", + "SatelliteAzimuthAngle"] for k in angles: k = data_var_prefix + "/" + k file_content[k] = lon_data # close enough to SZA @@ -193,8 +193,8 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi @staticmethod def _add_geo_ref(file_content, filename): - geo_prefix = 'GATMO' - file_content['/attr/N_GEO_Ref'] = geo_prefix + filename[5:] + geo_prefix = "GATMO" + file_content["/attr/N_GEO_Ref"] = geo_prefix + filename[5:] @staticmethod def _convert_numpy_content_to_dataarray(final_content): @@ -204,9 +204,9 @@ def _convert_numpy_content_to_dataarray(final_content): if isinstance(val, np.ndarray): val = da.from_array(val, chunks=val.shape) if val.ndim > 2: - final_content[key] = DataArray(val, dims=('y', 'x', 'z')) + final_content[key] = DataArray(val, dims=("y", "x", "z")) elif val.ndim > 1: - final_content[key] = DataArray(val, dims=('y', 'x')) + final_content[key] = DataArray(val, dims=("y", "x")) else: final_content[key] = DataArray(val) @@ -215,9 +215,9 @@ def get_test_content(self, filename, filename_info, filetype_info): final_content = {} for dataset in self.datasets: dataset_group = DATASET_KEYS[dataset] - prefix1 = 'Data_Products/{dataset_group}'.format(dataset_group=dataset_group) - prefix2 = '{prefix}/{dataset_group}_Aggr'.format(prefix=prefix1, dataset_group=dataset_group) - prefix3 = 'All_Data/{dataset_group}_All'.format(dataset_group=dataset_group) + prefix1 = "Data_Products/{dataset_group}".format(dataset_group=dataset_group) + prefix2 = "{prefix}/{dataset_group}_Aggr".format(prefix=prefix1, dataset_group=dataset_group) + prefix3 = "All_Data/{dataset_group}_All".format(dataset_group=dataset_group) file_content = {} self._add_basic_metadata_to_file_content(file_content, filename_info, self._num_test_granules) @@ -229,10 +229,10 @@ 
def get_test_content(self, filename, filename_info, filetype_info): for k, v in list(file_content.items()): file_content[k.format(prefix1=prefix1, prefix2=prefix2, prefix3=prefix3)] = v - if filename[:5] in ['SATMS', 'TATMS']: + if filename[:5] in ["SATMS", "TATMS"]: self._add_data_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) - elif filename[0] == 'G': + elif filename[0] == "G": self._add_geolocation_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) final_content.update(file_content) @@ -248,23 +248,23 @@ class TestATMS_SDR_Reader: def _assert_bt_properties(self, data_arr, num_scans=1, with_area=True): assert np.issubdtype(data_arr.dtype, np.float32) - assert data_arr.attrs['calibration'] == 'brightness_temperature' - assert data_arr.attrs['units'] == 'K' - assert data_arr.attrs['rows_per_scan'] == num_scans + assert data_arr.attrs["calibration"] == "brightness_temperature" + assert data_arr.attrs["units"] == "K" + assert data_arr.attrs["rows_per_scan"] == num_scans if with_area: - assert 'area' in data_arr.attrs - assert data_arr.attrs['area'] is not None - assert data_arr.attrs['area'].shape == data_arr.shape + assert "area" in data_arr.attrs + assert data_arr.attrs["area"] is not None + assert data_arr.attrs["area"].shape == data_arr.shape else: - assert 'area' not in data_arr.attrs + assert "area" not in data_arr.attrs def setup_method(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy.readers.viirs_atms_sdr_base import JPSS_SDR_FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(JPSS_SDR_FileHandler, '__bases__', (FakeHDF5_ATMS_SDR_FileHandler,)) + self.p = mock.patch.object(JPSS_SDR_FileHandler, "__bases__", (FakeHDF5_ATMS_SDR_FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -277,7 +277,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - '/path/to/atms/sdr/data/SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5', + "/path/to/atms/sdr/data/SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) @@ -288,22 +288,22 @@ def test_init_start_end_time(self): """Test basic init with start and end times around the start/end times of the provided file.""" r = load_reader(self.reader_configs, filter_parameters={ - 'start_time': datetime(2022, 12, 19), - 'end_time': datetime(2022, 12, 21) + "start_time": datetime(2022, 12, 19), + "end_time": datetime(2022, 12, 21) }) loadables = r.select_files_from_pathnames([ - 'SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5', + "SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files assert r.file_handlers - @pytest.mark.parametrize("files, expected", - [(['SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5', - 'GATMO_j01_d20221220_t0910240_e0921356_b26361_c20221220100456680030_cspp_dev.h5'], + @pytest.mark.parametrize(("files", "expected"), + 
[(["SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", + "GATMO_j01_d20221220_t0910240_e0921356_b26361_c20221220100456680030_cspp_dev.h5"], True), - (['SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5', ], + (["SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5", ], False)] ) def test_load_all_bands(self, files, expected): diff --git a/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py b/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py index e5241ba025..b1504e9014 100644 --- a/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py +++ b/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py @@ -44,7 +44,7 @@ def setUp(self) -> None: test_data["id"]["id"][:5] = 891 # Channel 3b test_data["id"]["id"][5:] = 890 - with NamedTemporaryFile(mode='w+', suffix='.hmf', delete=False) as hrpt_file: + with NamedTemporaryFile(mode="w+", suffix=".hmf", delete=False) as hrpt_file: self.filename = hrpt_file.name test_data.tofile(hrpt_file) @@ -71,7 +71,7 @@ class TestHRPTGetUncalibratedData(TestHRPTWithFile): """Test case for reading uncalibrated hrpt data.""" def _get_channel_1_counts(self): - return self._get_dataset(make_dataid(name='1', calibration='counts')) + return self._get_dataset(make_dataid(name="1", calibration="counts")) def test_get_dataset_returns_a_dataarray(self): """Test that get_dataset returns a dataarray.""" @@ -81,7 +81,7 @@ def test_get_dataset_returns_a_dataarray(self): def test_platform_name(self): """Test that the platform name is correct.""" result = self._get_channel_1_counts() - assert result.attrs['platform_name'] == 'NOAA 19' + assert result.attrs["platform_name"] == "NOAA 19" def test_no_calibration_values_are_1(self): """Test that the values of non-calibrated data is 1.""" @@ -137,7 +137,7 @@ class TestHRPTGetCalibratedReflectances(TestHRPTWithPatchedCalibratorAndFile): def _get_channel_1_reflectance(self): """Get the channel 1 reflectance.""" - dataset_id = make_dataid(name='1', calibration='reflectance') + dataset_id = make_dataid(name="1", calibration="reflectance") return self._get_dataset(dataset_id) def test_calibrated_reflectances_values(self): @@ -151,7 +151,7 @@ class TestHRPTGetCalibratedBT(TestHRPTWithPatchedCalibratorAndFile): def _get_channel_4_bt(self): """Get the channel 4 bt.""" - dataset_id = make_dataid(name='4', calibration='brightness_temperature') + dataset_id = make_dataid(name="4", calibration="brightness_temperature") return self._get_dataset(dataset_id) def test_calibrated_bt_values(self): @@ -165,17 +165,17 @@ class TestHRPTChannel3(TestHRPTWithPatchedCalibratorAndFile): def _get_channel_3b_bt(self): """Get the channel 4 bt.""" - dataset_id = make_dataid(name='3b', calibration='brightness_temperature') + dataset_id = make_dataid(name="3b", calibration="brightness_temperature") return self._get_dataset(dataset_id) def _get_channel_3a_reflectance(self): """Get the channel 4 bt.""" - dataset_id = make_dataid(name='3a', calibration='reflectance') + dataset_id = make_dataid(name="3a", calibration="reflectance") return self._get_dataset(dataset_id) def _get_channel_3a_counts(self): """Get the channel 4 bt.""" - dataset_id = make_dataid(name='3a', calibration='counts') + dataset_id = make_dataid(name="3a", calibration="counts") return self._get_dataset(dataset_id) def test_channel_3b_masking(self): @@ -212,7 +212,7 @@ def _prepare_mocks(self, Orbital, SatelliteInterpolator, get_lonlatalt): get_lonlatalt.return_value = (mock.MagicMock(), mock.MagicMock(), mock.MagicMock()) 
SatelliteInterpolator.return_value.interpolate.return_value = self.fake_lons, self.fake_lats - @mock.patch.multiple('satpy.readers.hrpt', + @mock.patch.multiple("satpy.readers.hrpt", Orbital=mock.DEFAULT, compute_pixels=mock.DEFAULT, get_lonlatalt=mock.DEFAULT, @@ -220,11 +220,11 @@ def _prepare_mocks(self, Orbital, SatelliteInterpolator, get_lonlatalt): def test_longitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator): """Check that latitudes are returned properly.""" self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt) - dataset_id = make_dataid(name='longitude') + dataset_id = make_dataid(name="longitude") result = self._get_dataset(dataset_id) assert (result == self.fake_lons).all() - @mock.patch.multiple('satpy.readers.hrpt', + @mock.patch.multiple("satpy.readers.hrpt", Orbital=mock.DEFAULT, compute_pixels=mock.DEFAULT, get_lonlatalt=mock.DEFAULT, @@ -232,6 +232,6 @@ def test_longitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, S def test_latitudes_are_returned(self, Orbital, compute_pixels, get_lonlatalt, SatelliteInterpolator): """Check that latitudes are returned properly.""" self._prepare_mocks(Orbital, SatelliteInterpolator, get_lonlatalt) - dataset_id = make_dataid(name='latitude') + dataset_id = make_dataid(name="latitude") result = self._get_dataset(dataset_id) assert (result == self.fake_lats).all() diff --git a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py index 4a543b449c..2272a950bf 100644 --- a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py +++ b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py @@ -27,49 +27,49 @@ GAC_PATTERN = '{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}' # noqa -GAC_POD_FILENAMES = ['NSS.GHRR.NA.D79184.S1150.E1337.B0008384.WI', - 'NSS.GHRR.NA.D79184.S2350.E0137.B0008384.WI', - 'NSS.GHRR.NA.D80021.S0927.E1121.B0295354.WI', - 'NSS.GHRR.NA.D80021.S1120.E1301.B0295455.WI', - 'NSS.GHRR.NA.D80021.S1256.E1450.B0295556.GC', - 'NSS.GHRR.NE.D83208.S1219.E1404.B0171819.WI', - 'NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI', - 'NSS.GHRR.TN.D79183.S1258.E1444.B0369697.GC', - 'NSS.GHRR.TN.D80003.S1147.E1332.B0630506.GC', - 'NSS.GHRR.TN.D80003.S1328.E1513.B0630507.GC', - 'NSS.GHRR.TN.D80003.S1509.E1654.B0630608.GC'] - -GAC_KLM_FILENAMES = ['NSS.GHRR.NK.D01235.S0252.E0446.B1703233.GC', - 'NSS.GHRR.NL.D01288.S2315.E0104.B0549495.GC', - 'NSS.GHRR.NM.D04111.S2305.E0050.B0947778.GC', - 'NSS.GHRR.NN.D13011.S0559.E0741.B3939192.WI', - 'NSS.GHRR.NP.D15361.S0121.E0315.B3547172.SV', - 'NSS.GHRR.M1.D15362.S0031.E0129.B1699697.SV', - 'NSS.GHRR.M2.D10178.S2359.E0142.B1914142.SV'] - -LAC_POD_FILENAMES = ['BRN.HRPT.ND.D95152.S1730.E1715.B2102323.UB', - 'BRN.HRPT.ND.D95152.S1910.E1857.B2102424.UB', - 'BRN.HRPT.NF.D85152.S1345.E1330.B0241414.UB', - 'BRN.HRPT.NJ.D95152.S1233.E1217.B0216060.UB'] - -LAC_KLM_FILENAMES = ['BRN.HRPT.M1.D14152.S0958.E1012.B0883232.UB', - 'BRN.HRPT.M1.D14152.S1943.E1958.B0883838.UB', - 'BRN.HRPT.M2.D12153.S0912.E0922.B2914747.UB', - 'BRN.HRPT.NN.D12153.S0138.E0152.B3622828.UB', - 'BRN.HRPT.NN.D12153.S0139.E0153.B3622828.UB', - 'BRN.HRPT.NN.D12153.S1309.E1324.B3623535.UB', - 'BRN.HRPT.NP.D12153.S0003.E0016.B1707272.UB', - 'BRN.HRPT.NP.D12153.S1134.E1148.B1707979.UB', - 'BRN.HRPT.NP.D16184.S1256.E1311.B3813131.UB', - 'BRN.HRPT.NP.D16184.S1438.E1451.B3813232.UB', - 'BRN.HRPT.NP.D16184.S1439.E1451.B3813232.UB', - 
'BRN.HRPT.NP.D16185.S1245.E1259.B3814545.UB', - 'BRN.HRPT.NP.D16185.S1427.E1440.B3814646.UB', - 'NSS.FRAC.M2.D12153.S1729.E1910.B2915354.SV', - 'NSS.LHRR.NP.D16306.S1803.E1814.B3985555.WI'] - - -@mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__', return_value=None) +GAC_POD_FILENAMES = ["NSS.GHRR.NA.D79184.S1150.E1337.B0008384.WI", + "NSS.GHRR.NA.D79184.S2350.E0137.B0008384.WI", + "NSS.GHRR.NA.D80021.S0927.E1121.B0295354.WI", + "NSS.GHRR.NA.D80021.S1120.E1301.B0295455.WI", + "NSS.GHRR.NA.D80021.S1256.E1450.B0295556.GC", + "NSS.GHRR.NE.D83208.S1219.E1404.B0171819.WI", + "NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI", + "NSS.GHRR.TN.D79183.S1258.E1444.B0369697.GC", + "NSS.GHRR.TN.D80003.S1147.E1332.B0630506.GC", + "NSS.GHRR.TN.D80003.S1328.E1513.B0630507.GC", + "NSS.GHRR.TN.D80003.S1509.E1654.B0630608.GC"] + +GAC_KLM_FILENAMES = ["NSS.GHRR.NK.D01235.S0252.E0446.B1703233.GC", + "NSS.GHRR.NL.D01288.S2315.E0104.B0549495.GC", + "NSS.GHRR.NM.D04111.S2305.E0050.B0947778.GC", + "NSS.GHRR.NN.D13011.S0559.E0741.B3939192.WI", + "NSS.GHRR.NP.D15361.S0121.E0315.B3547172.SV", + "NSS.GHRR.M1.D15362.S0031.E0129.B1699697.SV", + "NSS.GHRR.M2.D10178.S2359.E0142.B1914142.SV"] + +LAC_POD_FILENAMES = ["BRN.HRPT.ND.D95152.S1730.E1715.B2102323.UB", + "BRN.HRPT.ND.D95152.S1910.E1857.B2102424.UB", + "BRN.HRPT.NF.D85152.S1345.E1330.B0241414.UB", + "BRN.HRPT.NJ.D95152.S1233.E1217.B0216060.UB"] + +LAC_KLM_FILENAMES = ["BRN.HRPT.M1.D14152.S0958.E1012.B0883232.UB", + "BRN.HRPT.M1.D14152.S1943.E1958.B0883838.UB", + "BRN.HRPT.M2.D12153.S0912.E0922.B2914747.UB", + "BRN.HRPT.NN.D12153.S0138.E0152.B3622828.UB", + "BRN.HRPT.NN.D12153.S0139.E0153.B3622828.UB", + "BRN.HRPT.NN.D12153.S1309.E1324.B3623535.UB", + "BRN.HRPT.NP.D12153.S0003.E0016.B1707272.UB", + "BRN.HRPT.NP.D12153.S1134.E1148.B1707979.UB", + "BRN.HRPT.NP.D16184.S1256.E1311.B3813131.UB", + "BRN.HRPT.NP.D16184.S1438.E1451.B3813232.UB", + "BRN.HRPT.NP.D16184.S1439.E1451.B3813232.UB", + "BRN.HRPT.NP.D16185.S1245.E1259.B3814545.UB", + "BRN.HRPT.NP.D16185.S1427.E1440.B3814646.UB", + "NSS.FRAC.M2.D12153.S1729.E1910.B2915354.SV", + "NSS.LHRR.NP.D16306.S1803.E1814.B3985555.WI"] + + +@mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__", return_value=None) def _get_fh_mocked(init_mock, **attrs): """Create a mocked file handler with the given attributes.""" from satpy.readers.avhrr_l1b_gaclac import GACLACFile @@ -82,11 +82,11 @@ def _get_fh_mocked(init_mock, **attrs): def _get_reader_mocked(along_track=3): """Create a mocked reader.""" - reader = mock.MagicMock(spacecraft_name='spacecraft_name', - meta_data={'foo': 'bar'}) + reader = mock.MagicMock(spacecraft_name="spacecraft_name", + meta_data={"foo": "bar"}) reader.mask = [0, 0] reader.get_times.return_value = np.arange(along_track) - reader.get_tle_lines.return_value = 'tle' + reader.get_tle_lines.return_value = "tle" return reader @@ -98,16 +98,16 @@ def setUp(self): self.pygac = mock.MagicMock() self.fhs = mock.MagicMock() modules = { - 'pygac': self.pygac, - 'pygac.gac_klm': self.pygac.gac_klm, - 'pygac.gac_pod': self.pygac.gac_pod, - 'pygac.lac_klm': self.pygac.lac_klm, - 'pygac.lac_pod': self.pygac.lac_pod, - 'pygac.utils': self.pygac.utils, - 'pygac.calibration': self.pygac.calibration, + "pygac": self.pygac, + "pygac.gac_klm": self.pygac.gac_klm, + "pygac.gac_pod": self.pygac.gac_pod, + "pygac.lac_klm": self.pygac.lac_klm, + "pygac.lac_pod": self.pygac.lac_pod, + "pygac.utils": self.pygac.utils, + "pygac.calibration": self.pygac.calibration, } - self.module_patcher = 
mock.patch.dict('sys.modules', modules) + self.module_patcher = mock.patch.dict("sys.modules", modules) self.module_patcher.start() def tearDown(self): @@ -131,7 +131,7 @@ def setUp(self): class TestGACLACFile(GACLACFilePatcher): """Test the GACLAC file handler.""" - def _get_fh(self, filename='NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI', + def _get_fh(self, filename="NSS.GHRR.NG.D88002.S0614.E0807.B0670506.WI", **kwargs): """Create a file handler.""" from trollsift import parse @@ -145,39 +145,37 @@ def test_init(self): from pygac.lac_klm import LACKLMReader from pygac.lac_pod import LACPODReader - kwargs = {'start_line': 1, - 'end_line': 2, - 'strip_invalid_coords': True, - 'interpolate_coords': True, - 'adjust_clock_drift': True, - 'tle_dir': 'tle_dir', - 'tle_name': 'tle_name', - 'tle_thresh': 123, - 'calibration': 'calibration'} + kwargs = {"start_line": 1, + "end_line": 2, + "strip_invalid_coords": True, + "interpolate_coords": True, + "adjust_clock_drift": True, + "tle_dir": "tle_dir", + "tle_name": "tle_name", + "tle_thresh": 123, + "calibration": "calibration"} for filenames, reader_cls in zip([GAC_POD_FILENAMES, GAC_KLM_FILENAMES, LAC_POD_FILENAMES, LAC_KLM_FILENAMES], [GACPODReader, GACKLMReader, LACPODReader, LACKLMReader]): for filename in filenames: fh = self._get_fh(filename, **kwargs) - self.assertLess(fh.start_time, fh.end_time, - "Start time must precede end time.") - self.assertIs(fh.reader_class, reader_cls, - 'Wrong reader class assigned to {}'.format(filename)) + assert fh.start_time < fh.end_time + assert fh.reader_class is reader_cls def test_read_raw_data(self): """Test raw data reading.""" fh = _get_fh_mocked(reader=None, - interpolate_coords='interpolate_coords', - creation_site='creation_site', - reader_kwargs={'foo': 'bar'}, - filename='myfile') + interpolate_coords="interpolate_coords", + creation_site="creation_site", + reader_kwargs={"foo": "bar"}, + filename="myfile") reader = mock.MagicMock(mask=[0]) reader_cls = mock.MagicMock(return_value=reader) fh.reader_class = reader_cls fh.read_raw_data() - reader_cls.assert_called_with(interpolate_coords='interpolate_coords', - creation_site='creation_site', - foo='bar') - reader.read.assert_called_with('myfile') + reader_cls.assert_called_with(interpolate_coords="interpolate_coords", + creation_site="creation_site", + foo="bar") + reader.read.assert_called_with("myfile") # Test exception if all data is masked reader.mask = [1] @@ -185,9 +183,9 @@ def test_read_raw_data(self): with self.assertRaises(ValueError): fh.read_raw_data() - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.slice') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.slice") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel") def test_get_dataset_slice(self, get_channel, slc, *mocks): """Get a slice of a dataset.""" from satpy.tests.utils import make_dataid @@ -206,28 +204,28 @@ def slice_patched(data, times): acq = np.array([0, 1, 2, 3, 4]) slc.side_effect = slice_patched get_channel.return_value = ch - kwargs_list = [{'strip_invalid_coords': False, - 'start_line': 123, 'end_line': 456}, - {'strip_invalid_coords': True, - 'start_line': None, 'end_line': None}, - {'strip_invalid_coords': True, - 'start_line': 123, 'end_line': 456}] + kwargs_list = [{"strip_invalid_coords": False, + "start_line": 
123, "end_line": 456}, + {"strip_invalid_coords": True, + "start_line": None, "end_line": None}, + {"strip_invalid_coords": True, + "start_line": 123, "end_line": 456}] for kwargs in kwargs_list: fh = _get_fh_mocked( reader=_get_reader_mocked(along_track=len(acq)), - chn_dict={'1': 0}, + chn_dict={"1": 0}, **kwargs ) - key = make_dataid(name='1', calibration='reflectance') - info = {'name': '1', 'standard_name': 'reflectance'} + key = make_dataid(name="1", calibration="reflectance") + info = {"name": "1", "standard_name": "reflectance"} res = fh.get_dataset(key, info) np.testing.assert_array_equal(res.data, ch[1:3, :]) - np.testing.assert_array_equal(res.coords['acq_time'].data, acq[1:3]) - np.testing.assert_array_equal(slc.call_args_list[-1][1]['times'], acq) - np.testing.assert_array_equal(slc.call_args_list[-1][1]['data'], ch) + np.testing.assert_array_equal(res.coords["acq_time"].data, acq[1:3]) + np.testing.assert_array_equal(slc.call_args_list[-1][1]["times"], acq) + np.testing.assert_array_equal(slc.call_args_list[-1][1]["data"], ch) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") def test_get_dataset_latlon(self, *mocks): """Test getting the latitudes and longitudes.""" from satpy.tests.utils import make_dataid @@ -245,26 +243,26 @@ def test_get_dataset_latlon(self, *mocks): ) # With interpolation of coordinates - for name, exp_data in zip(['longitude', 'latitude'], [lons, lats]): + for name, exp_data in zip(["longitude", "latitude"], [lons, lats]): key = make_dataid(name=name) - info = {'name': name, 'standard_name': 'my_standard_name'} + info = {"name": name, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) exp = xr.DataArray(exp_data, name=res.name, - dims=('y', 'x'), - coords={'acq_time': ('y', [0, 1, 2])}) + dims=("y", "x"), + coords={"acq_time": ("y", [0, 1, 2])}) xr.testing.assert_equal(res, exp) # Without interpolation of coordinates fh.interpolate_coords = False - for name, _exp_data in zip(['longitude', 'latitude'], [lons, lats]): + for name, _exp_data in zip(["longitude", "latitude"], [lons, lats]): key = make_dataid(name=name) - info = {'name': name, 'standard_name': 'my_standard_name'} + info = {"name": name, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) - self.assertTupleEqual(res.dims, ('y', 'x_every_eighth')) + assert res.dims == ("y", "x_every_eighth") - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_angle') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_angle") def test_get_dataset_angles(self, get_angle, *mocks): """Test getting the angles.""" from satpy.readers.avhrr_l1b_gaclac import ANGLES @@ -284,23 +282,23 @@ def test_get_dataset_angles(self, get_angle, *mocks): # With interpolation of coordinates for angle in ANGLES: key = make_dataid(name=angle) - info = {'name': angle, 'standard_name': 'my_standard_name'} + info = {"name": angle, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) exp = xr.DataArray(ones, name=res.name, - dims=('y', 'x'), - coords={'acq_time': ('y', [0, 1, 2])}) + dims=("y", "x"), + coords={"acq_time": ("y", [0, 1, 2])}) xr.testing.assert_equal(res, exp) # Without interpolation of coordinates fh.interpolate_coords = False for angle in ANGLES: key = make_dataid(name=angle) - info = 
{'name': angle, 'standard_name': 'my_standard_name'} + info = {"name": angle, "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) - self.assertTupleEqual(res.dims, ('y', 'x_every_eighth')) + assert res.dims == ("y", "x_every_eighth") - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._update_attrs") def test_get_dataset_qual_flags(self, *mocks): """Test getting the qualitiy flags.""" from satpy.tests.utils import make_dataid @@ -316,20 +314,20 @@ def test_get_dataset_qual_flags(self, *mocks): interpolate_coords=True ) - key = make_dataid(name='qual_flags') - info = {'name': 'qual_flags'} + key = make_dataid(name="qual_flags") + info = {"name": "qual_flags"} res = fh.get_dataset(key=key, info=info) exp = xr.DataArray(qual_flags, name=res.name, - dims=('y', 'num_flags'), - coords={'acq_time': ('y', [0, 1, 2]), - 'num_flags': ['Scan line number', - 'Fatal error flag', - 'Insufficient data for calibration', - 'Insufficient data for calibration', - 'Solar contamination of blackbody in channels 3', - 'Solar contamination of blackbody in channels 4', - 'Solar contamination of blackbody in channels 5']}) + dims=("y", "num_flags"), + coords={"acq_time": ("y", [0, 1, 2]), + "num_flags": ["Scan line number", + "Fatal error flag", + "Insufficient data for calibration", + "Insufficient data for calibration", + "Solar contamination of blackbody in channels 3", + "Solar contamination of blackbody in channels 4", + "Solar contamination of blackbody in channels 5"]}) xr.testing.assert_equal(res, exp) def test_get_channel(self): @@ -343,36 +341,34 @@ def test_get_channel(self): reader.get_counts.return_value = counts reader.get_calibrated_channels.return_value = calib_channels fh = _get_fh_mocked(reader=reader, counts=None, calib_channels=None, - chn_dict={'1': 0}) + chn_dict={"1": 0}) - key = make_dataid(name='1', calibration='counts') + key = make_dataid(name="1", calibration="counts") # Counts res = fh._get_channel(key=key) - np.testing.assert_array_equal(res, [[1, 2, 3], - [4, 5, 6]]) + np.testing.assert_array_equal(res, [[1, 2, 3], [4, 5, 6]]) np.testing.assert_array_equal(fh.counts, counts) # Reflectance and Brightness Temperature - for calib in ['reflectance', 'brightness_temperature']: - key = make_dataid(name='1', calibration=calib) + for calib in ["reflectance", "brightness_temperature"]: + key = make_dataid(name="1", calibration=calib) res = fh._get_channel(key=key) - np.testing.assert_array_equal(res, [[2, 4, 6], - [8, 10, 12]]) + np.testing.assert_array_equal(res, [[2, 4, 6], [8, 10, 12]]) np.testing.assert_array_equal(fh.calib_channels, calib_channels) # Invalid - with pytest.raises(ValueError): - key = make_dataid(name='7', calibration='coffee') + with pytest.raises(ValueError, match="coffee invalid value for "): + _ = make_dataid(name="7", calibration="coffee") # Buffering reader.get_counts.reset_mock() - key = make_dataid(name='1', calibration='counts') + key = make_dataid(name="1", calibration="counts") fh._get_channel(key=key) reader.get_counts.assert_not_called() reader.get_calibrated_channels.reset_mock() - for calib in ['reflectance', 'brightness_temperature']: - key = make_dataid(name='1', calibration=calib) + for calib in ["reflectance", "brightness_temperature"]: + key = make_dataid(name="1", calibration=calib) fh._get_channel(key) reader.get_calibrated_channels.assert_not_called() @@ -385,17 +381,15 @@ def test_get_angle(self): fh = _get_fh_mocked(reader=reader, angles=None) # 
Test angle readout - key = make_dataid(name='sensor_zenith_angle') + key = make_dataid(name="sensor_zenith_angle") res = fh._get_angle(key) - self.assertEqual(res, 2) - self.assertDictEqual(fh.angles, {'sensor_zenith_angle': 2, - 'sensor_azimuth_angle': 1, - 'solar_zenith_angle': 4, - 'solar_azimuth_angle': 3, - 'sun_sensor_azimuth_difference_angle': 5}) + assert res == 2 + assert fh.angles == {"sensor_zenith_angle": 2, "sensor_azimuth_angle": 1, + "solar_zenith_angle": 4, "solar_azimuth_angle": 3, + "sun_sensor_azimuth_difference_angle": 5} # Test buffering - key = make_dataid(name='sensor_azimuth_angle') + key = make_dataid(name="sensor_azimuth_angle") fh._get_angle(key) reader.get_angles.assert_called_once() @@ -410,14 +404,14 @@ def test_strip_invalid_lat(self): # Test stripping pygac.utils.strip_invalid_lat.return_value = 1, 2 start, end = fh._strip_invalid_lat() - self.assertTupleEqual((start, end), (1, 2)) + assert (start, end) == (1, 2) # Test buffering fh._strip_invalid_lat() pygac.utils.strip_invalid_lat.assert_called_once() - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._slice') - def test_slice(self, _slice): + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._slice") + def test_slice(self, _slice): # noqa: PT019 """Test slicing.""" def _slice_patched(data): @@ -425,31 +419,31 @@ def _slice_patched(data): _slice.side_effect = _slice_patched data = np.zeros((4, 2)) - times = np.array([1, 2, 3, 4], dtype='datetime64[us]') + times = np.array([1, 2, 3, 4], dtype="datetime64[us]") fh = _get_fh_mocked(start_line=1, end_line=3, strip_invalid_coords=False) data_slc, times_slc = fh.slice(data, times) np.testing.assert_array_equal(data_slc, data[1:3]) np.testing.assert_array_equal(times_slc, times[1:3]) - self.assertEqual(fh.start_time, datetime(1970, 1, 1, 0, 0, 0, 2)) - self.assertEqual(fh.end_time, datetime(1970, 1, 1, 0, 0, 0, 3)) + assert fh.start_time == datetime(1970, 1, 1, 0, 0, 0, 2) + assert fh.end_time == datetime(1970, 1, 1, 0, 0, 0, 3) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_qual_flags') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._strip_invalid_lat') + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_qual_flags") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._strip_invalid_lat") def test__slice(self, strip_invalid_lat, get_qual_flags): """Test slicing.""" import pygac.utils pygac.utils.check_user_scanlines.return_value = 1, 2 - pygac.utils.slice_channel.return_value = 'sliced' + pygac.utils.slice_channel.return_value = "sliced" strip_invalid_lat.return_value = 3, 4 - get_qual_flags.return_value = 'qual_flags' + get_qual_flags.return_value = "qual_flags" data = np.zeros((2, 2)) # a) Only start/end line given fh = _get_fh_mocked(start_line=5, end_line=6, strip_invalid_coords=False) data_slc = fh._slice(data) - self.assertEqual(data_slc, 'sliced') + assert data_slc == "sliced" pygac.utils.check_user_scanlines.assert_called_with( start_line=5, end_line=6, first_valid_lat=None, last_valid_lat=None, along_track=2) @@ -472,9 +466,9 @@ def test__slice(self, strip_invalid_lat, get_qual_flags): first_valid_lat=3, last_valid_lat=4, along_track=2) # Test slicing with older pygac versions - pygac.utils.slice_channel.return_value = ('sliced', 'foo', 'bar') + pygac.utils.slice_channel.return_value = ("sliced", "foo", "bar") data_slc = fh._slice(data) - self.assertEqual(data_slc, 'sliced') + assert data_slc == "sliced" class TestGetDataset(GACLACFilePatcher): @@ -483,21 +477,21 @@ class 
TestGetDataset(GACLACFilePatcher): def setUp(self): """Set up the instance.""" self.exp = xr.DataArray(da.ones((3, 3)), - name='1', - dims=('y', 'x'), - coords={'acq_time': ('y', [0, 1, 2])}, - attrs={'name': '1', - 'platform_name': 'spacecraft_name', - 'orbit_number': 123, - 'sensor': 'sensor', - 'foo': 'bar', - 'standard_name': 'my_standard_name'}) - self.exp.coords['acq_time'].attrs['long_name'] = 'Mean scanline acquisition time' + name="1", + dims=("y", "x"), + coords={"acq_time": ("y", [0, 1, 2])}, + attrs={"name": "1", + "platform_name": "spacecraft_name", + "orbit_number": 123, + "sensor": "sensor", + "foo": "bar", + "standard_name": "my_standard_name"}) + self.exp.coords["acq_time"].attrs["long_name"] = "Mean scanline acquisition time" super().setUp() - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__', return_value=None) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data') - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel', return_value=np.ones((3, 3))) + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__", return_value=None) + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel", return_value=np.ones((3, 3))) def test_get_dataset_channels(self, get_channel, *mocks): """Test getting the channel datasets.""" pygac_reader = _get_reader_mocked() @@ -506,7 +500,7 @@ def test_get_dataset_channels(self, get_channel, *mocks): # Test calibration to reflectance as well as attributes. key, res = self._get_dataset(fh) exp = self._create_expected(res.name) - exp.attrs['orbital_parameters'] = {'tle': 'tle'} + exp.attrs["orbital_parameters"] = {"tle": "tle"} xr.testing.assert_identical(res, exp) get_channel.assert_called_with(key) @@ -517,8 +511,8 @@ def test_get_dataset_channels(self, get_channel, *mocks): def _get_dataset(fh): from satpy.tests.utils import make_dataid - key = make_dataid(name='1', calibration='reflectance') - info = {'name': '1', 'standard_name': 'my_standard_name'} + key = make_dataid(name="1", calibration="reflectance") + info = {"name": "1", "standard_name": "my_standard_name"} res = fh.get_dataset(key=key, info=info) return key, res @@ -527,12 +521,12 @@ def _create_file_handler(reader): """Mock reader and file handler.""" fh = _get_fh_mocked( reader=reader, - chn_dict={'1': 0, '5': 0}, + chn_dict={"1": 0, "5": 0}, start_line=None, end_line=None, strip_invalid_coords=False, - filename_info={'orbit_number': 123}, - sensor='sensor', + filename_info={"orbit_number": 123}, + sensor="sensor", ) return fh @@ -540,20 +534,20 @@ def _create_file_handler(reader): def _create_expected(name): exp = xr.DataArray(da.ones((3, 3)), name=name, - dims=('y', 'x'), - coords={'acq_time': ('y', [0, 1, 2])}, - attrs={'name': '1', - 'platform_name': 'spacecraft_name', - 'orbit_number': 123, - 'sensor': 'sensor', - 'foo': 'bar', - 'standard_name': 'my_standard_name'}) - exp.coords['acq_time'].attrs['long_name'] = 'Mean scanline acquisition time' + dims=("y", "x"), + coords={"acq_time": ("y", [0, 1, 2])}, + attrs={"name": "1", + "platform_name": "spacecraft_name", + "orbit_number": 123, + "sensor": "sensor", + "foo": "bar", + "standard_name": "my_standard_name"}) + exp.coords["acq_time"].attrs["long_name"] = "Mean scanline acquisition time" return exp - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__', return_value=None) - @mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data') - 
@mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel', return_value=np.ones((3, 3))) + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.__init__", return_value=None) + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile.read_raw_data") + @mock.patch("satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_channel", return_value=np.ones((3, 3))) def test_get_dataset_no_tle(self, get_channel, *mocks): """Test getting the channel datasets when no TLEs are present.""" pygac_reader = _get_reader_mocked() @@ -575,7 +569,7 @@ def _check_get_channel_calls(fh, get_channel): """Check _get_channel() calls.""" from satpy.tests.utils import make_dataid - for key in [make_dataid(name='1', calibration='counts'), - make_dataid(name='5', calibration='brightness_temperature')]: - fh.get_dataset(key=key, info={'name': 1}) + for key in [make_dataid(name="1", calibration="counts"), + make_dataid(name="5", calibration="brightness_temperature")]: + fh.get_dataset(key=key, info={"name": 1}) get_channel.assert_called_with(key) diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx.py index 86e0cf1fa7..bc5e968b08 100644 --- a/satpy/tests/reader_tests/test_clavrx.py +++ b/satpy/tests/reader_tests/test_clavrx.py @@ -45,64 +45,64 @@ class FakeHDF4FileHandlerPolar(FakeHDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/platform': 'SNPP', - '/attr/sensor': 'VIIRS', + "/attr/platform": "SNPP", + "/attr/sensor": "VIIRS", } - file_content['longitude'] = xr.DataArray( + file_content["longitude"] = xr.DataArray( da.from_array(DEFAULT_LON_DATA, chunks=4096), attrs={ - '_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'standard_name': 'longitude', + "_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "longitude", }) - file_content['longitude/shape'] = DEFAULT_FILE_SHAPE + file_content["longitude/shape"] = DEFAULT_FILE_SHAPE - file_content['latitude'] = xr.DataArray( + file_content["latitude"] = xr.DataArray( da.from_array(DEFAULT_LAT_DATA, chunks=4096), attrs={ - '_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'standard_name': 'latitude', + "_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "latitude", }) - file_content['latitude/shape'] = DEFAULT_FILE_SHAPE + file_content["latitude/shape"] = DEFAULT_FILE_SHAPE - file_content['variable1'] = xr.DataArray( + file_content["variable1"] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32), attrs={ - '_FillValue': -1, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', + "_FillValue": -1, + "scale_factor": 1., + "add_offset": 0., + "units": "1", }) - file_content['variable1/shape'] = DEFAULT_FILE_SHAPE + file_content["variable1/shape"] = DEFAULT_FILE_SHAPE # data with fill values - file_content['variable2'] = xr.DataArray( + file_content["variable2"] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32), attrs={ - '_FillValue': -1, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', + "_FillValue": -1, + "scale_factor": 1., + "add_offset": 0., + "units": "1", }) - file_content['variable2/shape'] = DEFAULT_FILE_SHAPE - file_content['variable2'] = file_content['variable2'].where( - file_content['variable2'] % 2 != 0) + file_content["variable2/shape"] = DEFAULT_FILE_SHAPE + file_content["variable2"] = file_content["variable2"].where( + file_content["variable2"] % 2 != 0) # 
category - file_content['variable3'] = xr.DataArray( + file_content["variable3"] = xr.DataArray( da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.byte), attrs={ - 'SCALED': 0, - '_FillValue': -128, - 'flag_meanings': 'clear water supercooled mixed ice unknown', - 'flag_values': [0, 1, 2, 3, 4, 5], - 'units': 'none', + "SCALED": 0, + "_FillValue": -128, + "flag_meanings": "clear water supercooled mixed ice unknown", + "flag_values": [0, 1, 2, 3, 4, 5], + "units": "none", }) - file_content['variable3/shape'] = DEFAULT_FILE_SHAPE + file_content["variable3/shape"] = DEFAULT_FILE_SHAPE return file_content @@ -116,9 +116,9 @@ def setUp(self): """Wrap HDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.clavrx import CLAVRXHDF4FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(CLAVRXHDF4FileHandler, '__bases__', (FakeHDF4FileHandlerPolar,)) + self.p = mock.patch.object(CLAVRXHDF4FileHandler, "__bases__", (FakeHDF4FileHandlerPolar,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -131,78 +131,78 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf', + "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_available_datasets(self): """Test available_datasets with fake variables from YAML.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf', + "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers # mimic the YAML file being configured for more datasets fake_dataset_info = [ - (None, {'name': 'variable1', 'resolution': None, 'file_type': ['clavrx_hdf4']}), - (True, {'name': 'variable2', 'resolution': 742, 'file_type': ['clavrx_hdf4']}), - (True, {'name': 'variable2', 'resolution': 1, 'file_type': ['clavrx_hdf4']}), - (None, {'name': 'variable2', 'resolution': 1, 'file_type': ['clavrx_hdf4']}), - (None, {'name': '_fake1', 'file_type': ['clavrx_hdf4']}), - (None, {'name': 'variable1', 'file_type': ['level_fake']}), - (True, {'name': 'variable3', 'file_type': ['clavrx_hdf4']}), + (None, {"name": "variable1", "resolution": None, "file_type": ["clavrx_hdf4"]}), + (True, {"name": "variable2", "resolution": 742, "file_type": ["clavrx_hdf4"]}), + (True, {"name": "variable2", "resolution": 1, "file_type": ["clavrx_hdf4"]}), + (None, {"name": "variable2", "resolution": 1, "file_type": ["clavrx_hdf4"]}), + (None, {"name": "_fake1", "file_type": ["clavrx_hdf4"]}), + (None, {"name": "variable1", "file_type": ["level_fake"]}), + (True, {"name": "variable3", "file_type": ["clavrx_hdf4"]}), ] - new_ds_infos = list(r.file_handlers['clavrx_hdf4'][0].available_datasets( + new_ds_infos = 
list(r.file_handlers["clavrx_hdf4"][0].available_datasets( fake_dataset_info)) - self.assertEqual(len(new_ds_infos), 9) + assert len(new_ds_infos) == 9 # we have this and can provide the resolution - self.assertTrue(new_ds_infos[0][0]) - self.assertEqual(new_ds_infos[0][1]['resolution'], 742) # hardcoded + assert new_ds_infos[0][0] + assert new_ds_infos[0][1]["resolution"] == 742 # hardcoded # we have this, but previous file handler said it knew about it # and it is producing the same resolution as what we have - self.assertTrue(new_ds_infos[1][0]) - self.assertEqual(new_ds_infos[1][1]['resolution'], 742) + assert new_ds_infos[1][0] + assert new_ds_infos[1][1]["resolution"] == 742 # we have this, but don't want to change the resolution # because a previous handler said it has it - self.assertTrue(new_ds_infos[2][0]) - self.assertEqual(new_ds_infos[2][1]['resolution'], 1) + assert new_ds_infos[2][0] + assert new_ds_infos[2][1]["resolution"] == 1 # even though the previous one was known we can still # produce it at our new resolution - self.assertTrue(new_ds_infos[3][0]) - self.assertEqual(new_ds_infos[3][1]['resolution'], 742) + assert new_ds_infos[3][0] + assert new_ds_infos[3][1]["resolution"] == 742 # we have this and can update the resolution since # no one else has claimed it - self.assertTrue(new_ds_infos[4][0]) - self.assertEqual(new_ds_infos[4][1]['resolution'], 742) + assert new_ds_infos[4][0] + assert new_ds_infos[4][1]["resolution"] == 742 # we don't have this variable, don't change it - self.assertFalse(new_ds_infos[5][0]) - self.assertIsNone(new_ds_infos[5][1].get('resolution')) + assert not new_ds_infos[5][0] + assert new_ds_infos[5][1].get("resolution") is None # we have this, but it isn't supposed to come from our file type - self.assertIsNone(new_ds_infos[6][0]) - self.assertIsNone(new_ds_infos[6][1].get('resolution')) + assert new_ds_infos[6][0] is None + assert new_ds_infos[6][1].get("resolution") is None # we could have loaded this but some other file handler said it has this - self.assertTrue(new_ds_infos[7][0]) - self.assertIsNone(new_ds_infos[7][1].get('resolution')) + assert new_ds_infos[7][0] + assert new_ds_infos[7][1].get("resolution") is None # we can add resolution to the previous dataset, so we do - self.assertTrue(new_ds_infos[8][0]) - self.assertEqual(new_ds_infos[8][1]['resolution'], 742) + assert new_ds_infos[8][0] + assert new_ds_infos[8][1]["resolution"] == 742 def test_load_all(self): """Test loading all test datasets.""" @@ -210,23 +210,23 @@ def test_load_all(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): + with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf', + "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf", ]) r.create_filehandlers(loadables) - var_list = ['variable1', 'variable2', 'variable3'] + var_list = ["variable1", "variable2", "variable3"] datasets = r.load(var_list) - self.assertEqual(len(datasets), len(var_list)) + assert len(datasets) == len(var_list) for v in datasets.values(): - self.assertEqual(v.attrs['units'], '1') - self.assertEqual(v.attrs['platform_name'], 'npp') - self.assertEqual(v.attrs['sensor'], 'viirs') - self.assertIsInstance(v.attrs['area'], SwathDefinition) - self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 16) - self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 16) - 
self.assertIsInstance(datasets["variable3"].attrs.get("flag_meanings"), list) + assert v.attrs["units"] == "1" + assert v.attrs["platform_name"] == "npp" + assert v.attrs["sensor"] == "viirs" + assert isinstance(v.attrs["area"], SwathDefinition) + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 16 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 16 + assert isinstance(datasets["variable3"].attrs.get("flag_meanings"), list) class FakeHDF4FileHandlerGeo(FakeHDF4FileHandler): @@ -235,72 +235,72 @@ class FakeHDF4FileHandlerGeo(FakeHDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/platform': 'HIM8', - '/attr/sensor': 'AHI', + "/attr/platform": "HIM8", + "/attr/sensor": "AHI", # this is a Level 2 file that came from a L1B file - '/attr/L1B': 'clavrx_H08_20180806_1800', + "/attr/L1B": "clavrx_H08_20180806_1800", } - file_content['longitude'] = xr.DataArray( + file_content["longitude"] = xr.DataArray( DEFAULT_LON_DATA, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - '_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'standard_name': 'longitude', + "_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "longitude", }) - file_content['longitude/shape'] = DEFAULT_FILE_SHAPE + file_content["longitude/shape"] = DEFAULT_FILE_SHAPE - file_content['latitude'] = xr.DataArray( + file_content["latitude"] = xr.DataArray( DEFAULT_LAT_DATA, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - '_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'standard_name': 'latitude', + "_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "latitude", }) - file_content['latitude/shape'] = DEFAULT_FILE_SHAPE + file_content["latitude/shape"] = DEFAULT_FILE_SHAPE - file_content['variable1'] = xr.DataArray( + file_content["variable1"] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.float32), - dims=('y', 'x'), + dims=("y", "x"), attrs={ - '_FillValue': -1, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', - 'valid_range': (-32767, 32767), + "_FillValue": -1, + "scale_factor": 1., + "add_offset": 0., + "units": "1", + "valid_range": (-32767, 32767), }) - file_content['variable1/shape'] = DEFAULT_FILE_SHAPE + file_content["variable1/shape"] = DEFAULT_FILE_SHAPE # data with fill values - file_content['variable2'] = xr.DataArray( + file_content["variable2"] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.float32), - dims=('y', 'x'), + dims=("y", "x"), attrs={ - '_FillValue': -1, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', + "_FillValue": -1, + "scale_factor": 1., + "add_offset": 0., + "units": "1", }) - file_content['variable2/shape'] = DEFAULT_FILE_SHAPE - file_content['variable2'] = file_content['variable2'].where( - file_content['variable2'] % 2 != 0) + file_content["variable2/shape"] = DEFAULT_FILE_SHAPE + file_content["variable2"] = file_content["variable2"].where( + file_content["variable2"] % 2 != 0) # category - file_content['variable3'] = xr.DataArray( + file_content["variable3"] = xr.DataArray( DEFAULT_FILE_DATA.astype(np.byte), - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'SCALED': 0, - '_FillValue': -128, - 'flag_meanings': 'clear water supercooled mixed ice unknown', - 'flag_values': [0, 1, 2, 3, 4, 5], - 'units': '1', + "SCALED": 0, + "_FillValue": -128, + "flag_meanings": "clear water supercooled mixed ice unknown", + "flag_values": [0, 1, 2, 3, 4, 5], + "units": "1", }) - file_content['variable3/shape'] = 
DEFAULT_FILE_SHAPE + file_content["variable3/shape"] = DEFAULT_FILE_SHAPE return file_content @@ -314,9 +314,9 @@ def setUp(self): """Wrap HDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.clavrx import CLAVRXHDF4FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(CLAVRXHDF4FileHandler, '__bases__', (FakeHDF4FileHandlerGeo,)) + self.p = mock.patch.object(CLAVRXHDF4FileHandler, "__bases__", (FakeHDF4FileHandlerGeo,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -329,12 +329,12 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'clavrx_H08_20180806_1800.level2.hdf', + "clavrx_H08_20180806_1800.level2.hdf", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_no_nav_donor(self): """Test exception raised when no donor file is available.""" @@ -342,12 +342,12 @@ def test_no_nav_donor(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): + with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'clavrx_H08_20180806_1800.level2.hdf', + "clavrx_H08_20180806_1800.level2.hdf", ]) r.create_filehandlers(loadables) - self.assertRaises(IOError, r.load, ['variable1', 'variable2', 'variable3']) + self.assertRaises(IOError, r.load, ["variable1", "variable2", "variable3"]) def test_load_all_old_donor(self): """Test loading all test datasets with old donor.""" @@ -355,13 +355,13 @@ def test_load_all_old_donor(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): + with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'clavrx_H08_20180806_1800.level2.hdf', + "clavrx_H08_20180806_1800.level2.hdf", ]) r.create_filehandlers(loadables) - with mock.patch('satpy.readers.clavrx.glob') as g, mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: - g.return_value = ['fake_donor.nc'] + with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: + g.return_value = ["fake_donor.nc"] x = np.linspace(-0.1518, 0.1518, 300) y = np.linspace(0.1518, -0.1518, 10) proj = mock.Mock( @@ -369,29 +369,29 @@ def test_load_all_old_donor(self): semi_minor_axis=6356.7523142, perspective_point_height=35791, longitude_of_projection_origin=140.7, - sweep_angle_axis='y', + sweep_angle_axis="y", ) d.return_value = fake_donor = mock.MagicMock( - variables={'Projection': proj, 'x': x, 'y': y}, + variables={"Projection": proj, "x": x, "y": y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] - datasets = r.load(['variable1', 'variable2', 'variable3']) - self.assertEqual(len(datasets), 3) + datasets = r.load(["variable1", "variable2", "variable3"]) + assert len(datasets) == 3 for v in datasets.values(): - self.assertNotIn('calibration', v.attrs) - self.assertEqual(v.attrs['units'], '1') - self.assertIsInstance(v.attrs['area'], 
AreaDefinition) + assert "calibration" not in v.attrs + assert v.attrs["units"] == "1" + assert isinstance(v.attrs["area"], AreaDefinition) if v.attrs.get("flag_values"): - self.assertIn('_FillValue', v.attrs) + assert "_FillValue" in v.attrs else: - self.assertNotIn('_FillValue', v.attrs) - if v.attrs["name"] == 'variable1': - self.assertIsInstance(v.attrs["valid_range"], list) + assert "_FillValue" not in v.attrs + if v.attrs["name"] == "variable1": + assert isinstance(v.attrs["valid_range"], list) else: - self.assertNotIn('valid_range', v.attrs) - if 'flag_values' in v.attrs: - self.assertTrue(np.issubdtype(v.dtype, np.integer)) - self.assertIsNotNone(v.attrs.get('flag_meanings')) + assert "valid_range" not in v.attrs + if "flag_values" in v.attrs: + assert np.issubdtype(v.dtype, np.integer) + assert v.attrs.get("flag_meanings") is not None def test_load_all_new_donor(self): """Test loading all test datasets with new donor.""" @@ -399,13 +399,13 @@ def test_load_all_new_donor(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray): + with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'clavrx_H08_20180806_1800.level2.hdf', + "clavrx_H08_20180806_1800.level2.hdf", ]) r.create_filehandlers(loadables) - with mock.patch('satpy.readers.clavrx.glob') as g, mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: - g.return_value = ['fake_donor.nc'] + with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: + g.return_value = ["fake_donor.nc"] x = np.linspace(-0.1518, 0.1518, 300) y = np.linspace(0.1518, -0.1518, 10) proj = mock.Mock( @@ -413,19 +413,19 @@ def test_load_all_new_donor(self): semi_minor_axis=6356752.3142, perspective_point_height=35791000, longitude_of_projection_origin=140.7, - sweep_angle_axis='y', + sweep_angle_axis="y", ) d.return_value = fake_donor = mock.MagicMock( - variables={'goes_imager_projection': proj, 'x': x, 'y': y}, + variables={"goes_imager_projection": proj, "x": x, "y": y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] - datasets = r.load(['variable1', 'variable2', 'variable3']) - self.assertEqual(len(datasets), 3) + datasets = r.load(["variable1", "variable2", "variable3"]) + assert len(datasets) == 3 for v in datasets.values(): - self.assertNotIn('calibration', v.attrs) - self.assertEqual(v.attrs['units'], '1') - self.assertIsInstance(v.attrs['area'], AreaDefinition) - self.assertTrue(v.attrs['area'].is_geostationary) - self.assertEqual(v.attrs['platform_name'], 'himawari8') - self.assertEqual(v.attrs['sensor'], 'ahi') - self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) + assert "calibration" not in v.attrs + assert v.attrs["units"] == "1" + assert isinstance(v.attrs["area"], AreaDefinition) + assert v.attrs["area"].is_geostationary + assert v.attrs["platform_name"] == "himawari8" + assert v.attrs["sensor"] == "ahi" + assert datasets["variable3"].attrs.get("flag_meanings") is not None diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py index ea0dcaed9b..33be29078a 100644 --- a/satpy/tests/reader_tests/test_clavrx_nc.py +++ b/satpy/tests/reader_tests/test_clavrx_nc.py @@ -36,75 +36,75 @@ DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0) DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) DEFAULT_LON_DATA = 
np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) -AHI_FILE = 'clavrx_H08_20210603_1500_B01_FLDK_R.level2.nc' +AHI_FILE = "clavrx_H08_20210603_1500_B01_FLDK_R.level2.nc" def fake_test_content(filename, **kwargs): """Mimic reader input file content.""" attrs = { - 'platform': 'HIM8', - 'sensor': 'AHI', + "platform": "HIM8", + "sensor": "AHI", # this is a Level 2 file that came from a L1B file - 'L1B': 'clavrx_H08_20210603_1500_B01_FLDK_R', + "L1B": "clavrx_H08_20210603_1500_B01_FLDK_R", } longitude = xr.DataArray(DEFAULT_LON_DATA, - dims=('scan_lines_along_track_direction', - 'pixel_elements_along_scan_direction'), - attrs={'_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'standard_name': 'longitude', - 'units': 'degrees_east' + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + attrs={"_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "longitude", + "units": "degrees_east" }) latitude = xr.DataArray(DEFAULT_LAT_DATA, - dims=('scan_lines_along_track_direction', - 'pixel_elements_along_scan_direction'), - attrs={'_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'standard_name': 'latitude', - 'units': 'degrees_south' + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + attrs={"_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "standard_name": "latitude", + "units": "degrees_south" }) variable1 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32), - dims=('scan_lines_along_track_direction', - 'pixel_elements_along_scan_direction'), - attrs={'_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', - 'valid_range': [-32767, 32767], + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + attrs={"_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "units": "1", + "valid_range": [-32767, 32767], }) # data with fill values variable2 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32), - dims=('scan_lines_along_track_direction', - 'pixel_elements_along_scan_direction'), - attrs={'_FillValue': np.nan, - 'scale_factor': 1., - 'add_offset': 0., - 'units': '1', - 'valid_range': [-32767, 32767], + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + attrs={"_FillValue": np.nan, + "scale_factor": 1., + "add_offset": 0., + "units": "1", + "valid_range": [-32767, 32767], }) variable2 = variable2.where(variable2 % 2 != 0) # category variable3 = xr.DataArray(DEFAULT_FILE_FLAGS, - dims=('scan_lines_along_track_direction', - 'pixel_elements_along_scan_direction'), - attrs={'SCALED': 0, - '_FillValue': -127, - 'units': '1', - 'flag_values': [0, 1, 2, 3]}) + dims=("scan_lines_along_track_direction", + "pixel_elements_along_scan_direction"), + attrs={"SCALED": 0, + "_FillValue": -127, + "units": "1", + "flag_values": [0, 1, 2, 3]}) ds_vars = { - 'longitude': longitude, - 'latitude': latitude, - 'variable1': variable1, - 'variable2': variable2, - 'variable3': variable3 + "longitude": longitude, + "latitude": latitude, + "variable1": variable1, + "variable2": variable2, + "variable3": variable3 } ds = xr.Dataset(ds_vars, attrs=attrs) @@ -121,7 +121,7 @@ class TestCLAVRXReaderGeo: def setup_method(self): """Read fake data.""" from satpy._config import config_search_paths - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) 
@pytest.mark.parametrize( ("filenames", "expected_loadables"), @@ -130,7 +130,7 @@ def setup_method(self): def test_reader_creation(self, filenames, expected_loadables): """Test basic initialization.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: + with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: od.side_effect = fake_test_content r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -141,12 +141,12 @@ def test_reader_creation(self, filenames, expected_loadables): @pytest.mark.parametrize( ("filenames", "expected_datasets"), - [([AHI_FILE], ['variable1', 'variable2', 'variable3']), ] + [([AHI_FILE], ["variable1", "variable2", "variable3"]), ] ) def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: + with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: od.side_effect = fake_test_content r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -157,19 +157,19 @@ def test_available_datasets(self, filenames, expected_datasets): @pytest.mark.parametrize( ("filenames", "loadable_ids"), - [([AHI_FILE], ['variable1', 'variable2', 'variable3']), ] + [([AHI_FILE], ["variable1", "variable2", "variable3"]), ] ) def test_load_all_new_donor(self, filenames, loadable_ids): """Test loading all test datasets with new donor.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od: + with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od: od.side_effect = fake_test_content r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables) - with mock.patch('satpy.readers.clavrx.glob') as g, \ - mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d: - g.return_value = ['fake_donor.nc'] + with mock.patch("satpy.readers.clavrx.glob") as g, \ + mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d: + g.return_value = ["fake_donor.nc"] x = np.linspace(-0.1518, 0.1518, 300) y = np.linspace(0.1518, -0.1518, 10) proj = mock.Mock( @@ -177,26 +177,26 @@ def test_load_all_new_donor(self, filenames, loadable_ids): semi_minor_axis=6356752.3142, perspective_point_height=35791000, longitude_of_projection_origin=140.7, - sweep_angle_axis='y', + sweep_angle_axis="y", ) d.return_value = fake_donor = mock.MagicMock( - variables={'goes_imager_projection': proj, 'x': x, 'y': y}, + variables={"goes_imager_projection": proj, "x": x, "y": y}, ) fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key] datasets = r.load(loadable_ids) assert len(datasets) == 3 for v in datasets.values(): - assert 'calibration' not in v.attrs - assert v.attrs['units'] == '1' - assert isinstance(v.attrs['area'], AreaDefinition) - assert v.attrs['platform_name'] == 'himawari8' - assert v.attrs['sensor'] == 'ahi' - assert 'rows_per_scan' not in v.coords.get('longitude').attrs + assert "calibration" not in v.attrs + assert v.attrs["units"] == "1" + assert isinstance(v.attrs["area"], AreaDefinition) + assert v.attrs["platform_name"] == "himawari8" + assert v.attrs["sensor"] == "ahi" + assert "rows_per_scan" not in v.coords.get("longitude").attrs if v.attrs["name"] in ["variable1", "variable2"]: assert isinstance(v.attrs["valid_range"], list) assert v.dtype == np.float32 assert "_FillValue" not in 
v.attrs.keys() else: - assert (datasets['variable3'].attrs.get('flag_meanings')) is not None - assert (datasets['variable3'].attrs.get('flag_meanings') == '') + assert (datasets["variable3"].attrs.get("flag_meanings")) is not None + assert (datasets["variable3"].attrs.get("flag_meanings") == "") assert np.issubdtype(v.dtype, np.integer) diff --git a/satpy/tests/reader_tests/test_cmsaf_claas.py b/satpy/tests/reader_tests/test_cmsaf_claas.py index 9c7c7e0089..4615662b32 100644 --- a/satpy/tests/reader_tests/test_cmsaf_claas.py +++ b/satpy/tests/reader_tests/test_cmsaf_claas.py @@ -17,7 +17,7 @@ # satpy. If not, see . """Tests for the 'cmsaf-claas2_l2_nc' reader.""" -import datetime +import datetime # noqa: I001 import os import numpy as np @@ -41,7 +41,7 @@ def start_time(request): return request.param -@pytest.fixture +@pytest.fixture() def start_time_str(start_time): """Get string representation of the start time.""" return start_time.strftime("%Y-%m-%dT%H:%M:%SZ") @@ -81,7 +81,7 @@ def fake_dataset(start_time_str): ) -@pytest.fixture +@pytest.fixture() def encoding(): """Dataset encoding.""" return { @@ -89,15 +89,15 @@ def encoding(): } -@pytest.fixture +@pytest.fixture() def fake_file(fake_dataset, encoding, tmp_path): """Write a fake dataset to file.""" filename = tmp_path / "CPPin20140101001500305SVMSG01MD.nc" fake_dataset.to_netcdf(filename, encoding=encoding) - yield filename + return filename -@pytest.fixture +@pytest.fixture() def fake_files(fake_dataset, encoding, tmp_path): """Write the same fake dataset into two different files.""" filenames = [ @@ -106,10 +106,10 @@ def fake_files(fake_dataset, encoding, tmp_path): ] for filename in filenames: fake_dataset.to_netcdf(filename, encoding=encoding) - yield filenames + return filenames -@pytest.fixture +@pytest.fixture() def reader(): """Return reader for CMSAF CLAAS-2.""" from satpy._config import config_search_paths @@ -137,14 +137,14 @@ def test_file_pattern(reader): class TestCLAAS2MultiFile: """Test reading multiple CLAAS-2 files.""" - @pytest.fixture + @pytest.fixture() def multi_file_reader(self, reader, fake_files): """Create a multi-file reader.""" loadables = reader.select_files_from_pathnames(fake_files) reader.create_filehandlers(loadables) return reader - @pytest.fixture + @pytest.fixture() def multi_file_dataset(self, multi_file_reader): """Load datasets from multiple files.""" ds_ids = [make_dataid(name=name) for name in ["cph", "ctt"]] @@ -157,7 +157,7 @@ def test_combine_timestamps(self, multi_file_reader, start_time): assert multi_file_reader.end_time == datetime.datetime(2085, 8, 13, 13, 15) @pytest.mark.parametrize( - "ds_name,expected", + ("ds_name", "expected"), [ ("cph", [[0, 1], [2, 0], [0, 1], [2, 0]]), ("ctt", [[280, 290], [300, 310], [280, 290], [300, 310]]), @@ -177,20 +177,20 @@ def test_number_of_datasets(self, multi_file_dataset): class TestCLAAS2SingleFile: """Test reading a single CLAAS2 file.""" - @pytest.fixture + @pytest.fixture() def file_handler(self, fake_file): """Return a CLAAS-2 file handler.""" from satpy.readers.cmsaf_claas2 import CLAAS2 return CLAAS2(fake_file, {}, {}) - @pytest.fixture + @pytest.fixture() def area_extent_exp(self, start_time): """Get expected area extent.""" if start_time < datetime.datetime(2017, 12, 6): return (-5454733.160460291, -5454733.160460292, 5454733.160460292, 5454733.160460291) return (-5456233.362099582, -5453232.958821001, 5453232.958821001, 5456233.362099582) - @pytest.fixture + @pytest.fixture() def area_exp(self, area_extent_exp): """Get expected area 
definition.""" proj_dict = { @@ -217,10 +217,10 @@ def test_get_area_def(self, file_handler, area_exp): assert area == area_exp @pytest.mark.parametrize( - "ds_name,expected", + ("ds_name", "expected"), [ - ("ctt", xr.DataArray([[280, 290], [300, 310]], dims=('y', 'x'))), - ("cph", xr.DataArray([[0, 1], [2, 0]], dims=('y', 'x'))), + ("ctt", xr.DataArray([[280, 290], [300, 310]], dims=("y", "x"))), + ("cph", xr.DataArray([[0, 1], [2, 0]], dims=("y", "x"))), ] ) def test_get_dataset(self, file_handler, ds_name, expected): diff --git a/satpy/tests/reader_tests/test_electrol_hrit.py b/satpy/tests/reader_tests/test_electrol_hrit.py index eca413d033..b3e14c24d1 100644 --- a/satpy/tests/reader_tests/test_electrol_hrit.py +++ b/satpy/tests/reader_tests/test_electrol_hrit.py @@ -48,16 +48,16 @@ class Testrecarray2dict(unittest.TestCase): def test_fun(self): """Test record array.""" - inner_st = np.dtype([('test_str', '02d}": {'channels': [4, 5, 6, 8, 9], - 'grid_type': '1km'}, - "nir_{:>02d}": {'channels': [13, 16, 22], - 'grid_type': '1km'}, - "ir_{:>02d}": {'channels': [38, 87, 97, 105, 123, 133], - 'grid_type': '2km'}, - "wv_{:>02d}": {'channels': [63, 73], - 'grid_type': '2km'}, + "vis_{:>02d}": {"channels": [4, 5, 6, 8, 9], + "grid_type": "1km"}, + "nir_{:>02d}": {"channels": [13, 16, 22], + "grid_type": "1km"}, + "ir_{:>02d}": {"channels": [38, 87, 97, 105, 123, 133], + "grid_type": "2km"}, + "wv_{:>02d}": {"channels": [63, 73], + "grid_type": "2km"}, } @@ -336,15 +336,15 @@ class FakeFCIFileHandlerWithBadIDPFData(FakeFCIFileHandlerFDHSI): def _get_test_content_all_channels(self): data = super()._get_test_content_all_channels() - data['data/vis_06/measured/x'].attrs['scale_factor'] *= -1 - data['data/vis_06/measured/x'].attrs['scale_factor'] = \ - np.float32(data['data/vis_06/measured/x'].attrs['scale_factor']) - data['data/vis_06/measured/x'].attrs['add_offset'] = \ - np.float32(data['data/vis_06/measured/x'].attrs['add_offset']) - data['data/vis_06/measured/y'].attrs['scale_factor'] = \ - np.float32(data['data/vis_06/measured/y'].attrs['scale_factor']) - data['data/vis_06/measured/y'].attrs['add_offset'] = \ - np.float32(data['data/vis_06/measured/y'].attrs['add_offset']) + data["data/vis_06/measured/x"].attrs["scale_factor"] *= -1 + data["data/vis_06/measured/x"].attrs["scale_factor"] = \ + np.float32(data["data/vis_06/measured/x"].attrs["scale_factor"]) + data["data/vis_06/measured/x"].attrs["add_offset"] = \ + np.float32(data["data/vis_06/measured/x"].attrs["add_offset"]) + data["data/vis_06/measured/y"].attrs["scale_factor"] = \ + np.float32(data["data/vis_06/measured/y"].attrs["scale_factor"]) + data["data/vis_06/measured/y"].attrs["add_offset"] = \ + np.float32(data["data/vis_06/measured/y"].attrs["add_offset"]) data["state/celestial/earth_sun_distance"] = xr.DataArray(da.repeat(da.array([30000000]), 6000)) return data @@ -354,12 +354,12 @@ class FakeFCIFileHandlerHRFI(FakeFCIFileHandlerBase): """Mock HRFI data.""" chan_patterns = { - "vis_{:>02d}_hr": {'channels': [6], - 'grid_type': '500m'}, - "nir_{:>02d}_hr": {'channels': [22], - 'grid_type': '500m'}, - "ir_{:>02d}_hr": {'channels': [38, 105], - 'grid_type': '1km'}, + "vis_{:>02d}_hr": {"channels": [6], + "grid_type": "500m"}, + "nir_{:>02d}_hr": {"channels": [22], + "grid_type": "500m"}, + "ir_{:>02d}_hr": {"channels": [38, 105], + "grid_type": "1km"}, } @@ -367,7 +367,7 @@ class FakeFCIFileHandlerHRFI(FakeFCIFileHandlerBase): # Fixtures preparation ------------------------------- # 
---------------------------------------------------- -@pytest.fixture +@pytest.fixture() def reader_configs(): """Return reader configs for FCI.""" from satpy._config import config_search_paths @@ -404,12 +404,12 @@ def clear_cache(reader): "terran": ["ir_38", "ir_105"], "terran_grid_type": ["1km"] * 2} -_test_filenames = {'fdhsi': [ +_test_filenames = {"fdhsi": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc" ], - 'hrfi': [ + "hrfi": [ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-HRFI-FD--" "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" "20170410113925_20170410113934_N__C_0070_0067.nc" @@ -426,26 +426,26 @@ def mocked_basefilehandler(filehandler): yield -@pytest.fixture +@pytest.fixture() def FakeFCIFileHandlerFDHSI_fixture(): """Get a fixture for the fake FDHSI filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerFDHSI): param_dict = { - 'filetype': 'fci_l1c_fdhsi', - 'channels': _chans_fdhsi, - 'filenames': _test_filenames['fdhsi'] + "filetype": "fci_l1c_fdhsi", + "channels": _chans_fdhsi, + "filenames": _test_filenames["fdhsi"] } yield param_dict -@pytest.fixture +@pytest.fixture() def FakeFCIFileHandlerHRFI_fixture(): """Get a fixture for the fake HRFI filehandler, including channel and file names.""" with mocked_basefilehandler(FakeFCIFileHandlerHRFI): param_dict = { - 'filetype': 'fci_l1c_hrfi', - 'channels': _chans_hrfi, - 'filenames': _test_filenames['hrfi'] + "filetype": "fci_l1c_hrfi", + "channels": _chans_hrfi, + "filenames": _test_filenames["hrfi"] } yield param_dict @@ -458,12 +458,12 @@ def FakeFCIFileHandlerHRFI_fixture(): class TestFCIL1cNCReader: """Test FCI L1c NetCDF reader with nominal data.""" - fh_param_for_filetype = {'hrfi': {'channels': _chans_hrfi, - 'filenames': _test_filenames['hrfi']}, - 'fdhsi': {'channels': _chans_fdhsi, - 'filenames': _test_filenames['fdhsi']}} + fh_param_for_filetype = {"hrfi": {"channels": _chans_hrfi, + "filenames": _test_filenames["hrfi"]}, + "fdhsi": {"channels": _chans_fdhsi, + "filenames": _test_filenames["fdhsi"]}} - @pytest.mark.parametrize('filenames', [_test_filenames['fdhsi'], _test_filenames['hrfi']]) + @pytest.mark.parametrize("filenames", [_test_filenames["fdhsi"], _test_filenames["hrfi"]]) def test_file_pattern(self, reader_configs, filenames): """Test file pattern matching.""" from satpy.readers import load_reader @@ -472,8 +472,8 @@ def test_file_pattern(self, reader_configs, filenames): files = reader.select_files_from_pathnames(filenames) assert len(files) == 1 - @pytest.mark.parametrize('filenames', [_test_filenames['fdhsi'][0].replace('BODY', 'TRAIL'), - _test_filenames['hrfi'][0].replace('BODY', 'TRAIL')]) + @pytest.mark.parametrize("filenames", [_test_filenames["fdhsi"][0].replace("BODY", "TRAIL"), + _test_filenames["hrfi"][0].replace("BODY", "TRAIL")]) def test_file_pattern_for_TRAIL_file(self, reader_configs, filenames): """Test file pattern matching for TRAIL files, which should not be picked up.""" from satpy.readers import load_reader @@ -482,226 +482,226 @@ def test_file_pattern_for_TRAIL_file(self, reader_configs, filenames): files = reader.select_files_from_pathnames(filenames) assert len(files) == 0 - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 16), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 4)]) + @pytest.mark.parametrize(("fh_param", "expected_res_n"), 
[(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) def test_load_counts(self, reader_configs, fh_param, expected_res_n): """Test loading with counts.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( [make_dataid(name=name, calibration="counts") for name in - fh_param['channels']["solar"] + fh_param['channels']["terran"]], pad_data=False) + fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["solar"] + fh_param['channels']["terran"], - fh_param['channels']["solar_grid_type"] + - fh_param['channels']["terran_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) + for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], + fh_param["channels"]["solar_grid_type"] + + fh_param["channels"]["terran_grid_type"]): + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == np.uint16 assert res[ch].attrs["calibration"] == "counts" assert res[ch].attrs["units"] == "count" - if ch == 'ir_38': + if ch == "ir_38": numpy.testing.assert_array_equal(res[ch][-1], 1) numpy.testing.assert_array_equal(res[ch][0], 5000) else: numpy.testing.assert_array_equal(res[ch], 1) - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 16), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 4)]) + @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) def test_load_radiance(self, reader_configs, fh_param, expected_res_n): """Test loading with radiance.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( [make_dataid(name=name, calibration="radiance") for name in - fh_param['channels']["solar"] + fh_param['channels']["terran"]], pad_data=False) + fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["solar"] + fh_param['channels']["terran"], - fh_param['channels']["solar_grid_type"] + - fh_param['channels']["terran_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) + for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], + fh_param["channels"]["solar_grid_type"] + + fh_param["channels"]["terran_grid_type"]): + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == np.float64 assert res[ch].attrs["calibration"] == "radiance" - assert res[ch].attrs["units"] == 'mW m-2 sr-1 (cm-1)-1' + assert res[ch].attrs["units"] == "mW m-2 sr-1 (cm-1)-1" assert res[ch].attrs["radiance_unit_conversion_coefficient"] == 1234.56 - if ch == 'ir_38': + if ch == "ir_38": numpy.testing.assert_array_equal(res[ch][-1], 15) numpy.testing.assert_array_equal(res[ch][0], 9700) else: 
numpy.testing.assert_array_equal(res[ch], 15) - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 8), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 2)]) + @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)]) def test_load_reflectance(self, reader_configs, fh_param, expected_res_n): """Test loading with reflectance.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( [make_dataid(name=name, calibration="reflectance") for name in - fh_param['channels']["solar"]], pad_data=False) + fh_param["channels"]["solar"]], pad_data=False) assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["solar"], fh_param['channels']["solar_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) + for ch, grid_type in zip(fh_param["channels"]["solar"], fh_param["channels"]["solar_grid_type"]): + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == np.float64 assert res[ch].attrs["calibration"] == "reflectance" assert res[ch].attrs["units"] == "%" numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50) - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 8), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 2)]) + @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)]) def test_load_bt(self, reader_configs, caplog, fh_param, expected_res_n): """Test loading with bt.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) with caplog.at_level(logging.WARNING): res = reader.load( [make_dataid(name=name, calibration="brightness_temperature") for - name in fh_param['channels']["terran"]], pad_data=False) + name in fh_param["channels"]["terran"]], pad_data=False) assert caplog.text == "" assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["terran"], fh_param['channels']["terran_grid_type"]): - assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) + for ch, grid_type in zip(fh_param["channels"]["terran"], fh_param["channels"]["terran_grid_type"]): + assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) assert res[ch].dtype == np.float64 assert res[ch].attrs["calibration"] == "brightness_temperature" assert res[ch].attrs["units"] == "K" - if ch == 'ir_38': + if ch == "ir_38": numpy.testing.assert_array_almost_equal(res[ch][-1], 209.68274099) numpy.testing.assert_array_almost_equal(res[ch][0], 1888.851296) else: numpy.testing.assert_array_almost_equal(res[ch], 209.68274099) - @pytest.mark.parametrize('fh_param', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture')), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'))]) + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), + 
(lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_orbital_parameters_attr(self, reader_configs, fh_param): """Test the orbital parameter attribute.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( [make_dataid(name=name) for name in - fh_param['channels']["solar"] + fh_param['channels']["terran"]], pad_data=False) + fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) - for ch in fh_param['channels']["solar"] + fh_param['channels']["terran"]: + for ch in fh_param["channels"]["solar"] + fh_param["channels"]["terran"]: assert res[ch].attrs["orbital_parameters"] == { - 'satellite_actual_longitude': np.mean(np.arange(6000)), - 'satellite_actual_latitude': np.mean(np.arange(6000)), - 'satellite_actual_altitude': np.mean(np.arange(6000)), - 'satellite_nominal_longitude': 0.0, - 'satellite_nominal_latitude': 0, - 'satellite_nominal_altitude': 35786400.0, - 'projection_longitude': 0.0, - 'projection_latitude': 0, - 'projection_altitude': 35786400.0, + "satellite_actual_longitude": np.mean(np.arange(6000)), + "satellite_actual_latitude": np.mean(np.arange(6000)), + "satellite_actual_altitude": np.mean(np.arange(6000)), + "satellite_nominal_longitude": 0.0, + "satellite_nominal_latitude": 0, + "satellite_nominal_altitude": 35786400.0, + "projection_longitude": 0.0, + "projection_latitude": 0, + "projection_altitude": 35786400.0, } expected_pos_info_for_filetype = { - 'fdhsi': {'1km': {'start_position_row': 1, - 'end_position_row': 200, - 'segment_height': 200, - 'grid_width': 11136}, - '2km': {'start_position_row': 1, - 'end_position_row': 100, - 'segment_height': 100, - 'grid_width': 5568}}, - 'hrfi': {'500m': {'start_position_row': 1, - 'end_position_row': 400, - 'segment_height': 400, - 'grid_width': 22272}, - '1km': {'start_position_row': 1, - 'end_position_row': 200, - 'grid_width': 11136, - 'segment_height': 200}} + "fdhsi": {"1km": {"start_position_row": 1, + "end_position_row": 200, + "segment_height": 200, + "grid_width": 11136}, + "2km": {"start_position_row": 1, + "end_position_row": 100, + "segment_height": 100, + "grid_width": 5568}}, + "hrfi": {"500m": {"start_position_row": 1, + "end_position_row": 400, + "segment_height": 400, + "grid_width": 22272}, + "1km": {"start_position_row": 1, + "end_position_row": 200, + "grid_width": 11136, + "segment_height": 200}} } - @pytest.mark.parametrize('fh_param, expected_pos_info', [ - (lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), expected_pos_info_for_filetype['fdhsi']), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), expected_pos_info_for_filetype['hrfi']) + @pytest.mark.parametrize(("fh_param", "expected_pos_info"), [ + (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), expected_pos_info_for_filetype["fdhsi"]), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), expected_pos_info_for_filetype["hrfi"]) ]) def test_get_segment_position_info(self, reader_configs, fh_param, expected_pos_info): """Test the segment position info method.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) for filetype_handler in list(reader.file_handlers.values())[0]: segpos_info = filetype_handler.get_segment_position_info() assert segpos_info == expected_pos_info - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 16), - 
(lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 4)]) + @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) def test_load_index_map(self, reader_configs, fh_param, expected_res_n): """Test loading of index_map.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( - [name + '_index_map' for name in - fh_param['channels']["solar"] + fh_param['channels']["terran"]], pad_data=False) + [name + "_index_map" for name in + fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["solar"] + fh_param['channels']["terran"], - fh_param['channels']["solar_grid_type"] + - fh_param['channels']["terran_grid_type"]): - assert res[ch + '_index_map'].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) - numpy.testing.assert_array_equal(res[ch + '_index_map'][1, 1], 110) - - @pytest.mark.parametrize('fh_param', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture')), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'))]) + for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], + fh_param["channels"]["solar_grid_type"] + + fh_param["channels"]["terran_grid_type"]): + assert res[ch + "_index_map"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + numpy.testing.assert_array_equal(res[ch + "_index_map"][1, 1], 110) + + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_load_aux_data(self, reader_configs, fh_param): """Test loading of auxiliary data.""" from satpy.readers.fci_l1c_nc import AUX_DATA - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) - res = reader.load([fh_param['channels']['solar'][0] + '_' + key for key in AUX_DATA.keys()], + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + res = reader.load([fh_param["channels"]["solar"][0] + "_" + key for key in AUX_DATA.keys()], pad_data=False) - grid_type = fh_param['channels']['solar_grid_type'][0] - for aux in [fh_param['channels']['solar'][0] + '_' + key for key in AUX_DATA.keys()]: - assert res[aux].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) - if aux == fh_param['channels']['solar'][0] + '_earth_sun_distance': + grid_type = fh_param["channels"]["solar_grid_type"][0] + for aux in [fh_param["channels"]["solar"][0] + "_" + key for key in AUX_DATA.keys()]: + assert res[aux].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + if aux == fh_param["channels"]["solar"][0] + "_earth_sun_distance": numpy.testing.assert_array_equal(res[aux][1, 1], 149597870.7) else: numpy.testing.assert_array_equal(res[aux][1, 1], 10) - @pytest.mark.parametrize('fh_param,expected_res_n', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), 16), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), 4)]) + @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)]) def 
test_load_quality_only(self, reader_configs, fh_param, expected_res_n): """Test that loading quality only works.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load( - [name + '_pixel_quality' for name in - fh_param['channels']["solar"] + fh_param['channels']["terran"]], pad_data=False) + [name + "_pixel_quality" for name in + fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False) assert expected_res_n == len(res) - for ch, grid_type in zip(fh_param['channels']["solar"] + fh_param['channels']["terran"], - fh_param['channels']["solar_grid_type"] + - fh_param['channels']["terran_grid_type"]): - assert res[ch + '_pixel_quality'].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['nrows'], - GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]['ncols']) - numpy.testing.assert_array_equal(res[ch + '_pixel_quality'][1, 1], 3) - assert res[ch + '_pixel_quality'].attrs["name"] == ch + '_pixel_quality' - - @pytest.mark.parametrize('fh_param', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture')), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'))]) + for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"], + fh_param["channels"]["solar_grid_type"] + + fh_param["channels"]["terran_grid_type"]): + assert res[ch + "_pixel_quality"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"], + GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"]) + numpy.testing.assert_array_equal(res[ch + "_pixel_quality"][1, 1], 3) + assert res[ch + "_pixel_quality"].attrs["name"] == ch + "_pixel_quality" + + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_platform_name(self, reader_configs, fh_param): """Test that platform name is exposed. Test that the FCI reader exposes the platform name. Corresponds to GH issue 1014. 
""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) res = reader.load(["vis_06"], pad_data=False) assert res["vis_06"].attrs["platform_name"] == "MTG-I1" - @pytest.mark.parametrize('fh_param, expected_area', [ - (lazy_fixture('FakeFCIFileHandlerFDHSI_fixture'), ['mtg_fci_fdss_1km', 'mtg_fci_fdss_2km']), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'), ['mtg_fci_fdss_500m', 'mtg_fci_fdss_1km']), + @pytest.mark.parametrize(("fh_param", "expected_area"), [ + (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), ["mtg_fci_fdss_1km", "mtg_fci_fdss_2km"]), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), ["mtg_fci_fdss_500m", "mtg_fci_fdss_1km"]), ]) def test_area_definition_computation(self, reader_configs, fh_param, expected_area): """Test that the geolocation computation is correct.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) - res = reader.load(['ir_105', 'vis_06'], pad_data=False) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) + res = reader.load(["ir_105", "vis_06"], pad_data=False) # test that area_ids are harmonisation-conform ___ - assert res['vis_06'].attrs['area'].area_id == expected_area[0] - assert res['ir_105'].attrs['area'].area_id == expected_area[1] + assert res["vis_06"].attrs["area"].area_id == expected_area[0] + assert res["ir_105"].attrs["area"].area_id == expected_area[1] - area_def = res['ir_105'].attrs['area'] + area_def = res["ir_105"].attrs["area"] # test area extents computation np.testing.assert_array_almost_equal(np.array(area_def.area_extent), np.array([-5567999.994203, -5367999.994411, @@ -709,23 +709,23 @@ def test_area_definition_computation(self, reader_configs, fh_param, expected_ar decimal=2) # check that the projection is read in properly - assert area_def.crs.coordinate_operation.method_name == 'Geostationary Satellite (Sweep Y)' + assert area_def.crs.coordinate_operation.method_name == "Geostationary Satellite (Sweep Y)" assert area_def.crs.coordinate_operation.params[0].value == 0.0 # projection origin longitude assert area_def.crs.coordinate_operation.params[1].value == 35786400.0 # projection height assert area_def.crs.ellipsoid.semi_major_metre == 6378137.0 assert area_def.crs.ellipsoid.inverse_flattening == 298.257223563 assert area_def.crs.ellipsoid.is_semi_minor_computed - @pytest.mark.parametrize('fh_param', [(lazy_fixture('FakeFCIFileHandlerFDHSI_fixture')), - (lazy_fixture('FakeFCIFileHandlerHRFI_fixture'))]) + @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")), + (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))]) def test_excs(self, reader_configs, fh_param): """Test that exceptions are raised where expected.""" - reader = _get_reader_with_filehandlers(fh_param['filenames'], reader_configs) + reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs) - with pytest.raises(ValueError): - reader.file_handlers[fh_param['filetype']][0].get_dataset(make_dataid(name="invalid"), {}) - with pytest.raises(ValueError): - reader.file_handlers[fh_param['filetype']][0].get_dataset( + with pytest.raises(ValueError, match="Unknown dataset key, not a channel, quality or auxiliary data: invalid"): + reader.file_handlers[fh_param["filetype"]][0].get_dataset(make_dataid(name="invalid"), {}) + with pytest.raises(ValueError, match="unknown invalid value for "): + reader.file_handlers[fh_param["filetype"]][0].get_dataset( 
make_dataid(name="ir_123", calibration="unknown"), {"units": "unknown"}) @@ -736,7 +736,7 @@ def test_load_composite(self): # in the tests.compositor_tests package from satpy.composites.config_loader import load_compositor_configs_for_sensors - comps, mods = load_compositor_configs_for_sensors(['fci']) + comps, mods = load_compositor_configs_for_sensors(["fci"]) assert len(comps["fci"]) > 0 assert len(mods["fci"]) > 0 @@ -747,7 +747,7 @@ class TestFCIL1cNCReaderBadData: def test_handling_bad_data_ir(self, reader_configs, caplog): """Test handling of bad IR data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadData): - reader = _get_reader_with_filehandlers(_test_filenames['fdhsi'], reader_configs) + reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) with caplog.at_level(logging.ERROR): reader.load([make_dataid( name="ir_105", @@ -757,7 +757,7 @@ def test_handling_bad_data_ir(self, reader_configs, caplog): def test_handling_bad_data_vis(self, reader_configs, caplog): """Test handling of bad VIS data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadData): - reader = _get_reader_with_filehandlers(_test_filenames['fdhsi'], reader_configs) + reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) with caplog.at_level(logging.ERROR): reader.load([make_dataid( name="vis_06", @@ -771,7 +771,7 @@ class TestFCIL1cNCReaderBadDataFromIDPF: def test_handling_bad_earthsun_distance(self, reader_configs): """Test handling of bad earth-sun distance data.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadIDPFData): - reader = _get_reader_with_filehandlers(_test_filenames['fdhsi'], reader_configs) + reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) res = reader.load([make_dataid(name=["vis_06"], calibration="reflectance")], pad_data=False) numpy.testing.assert_array_almost_equal(res["vis_06"], 100 * 15 * 1 * np.pi / 50) @@ -779,10 +779,10 @@ def test_handling_bad_earthsun_distance(self, reader_configs): def test_bad_xy_coords(self, reader_configs): """Test that the geolocation computation is correct.""" with mocked_basefilehandler(FakeFCIFileHandlerWithBadIDPFData): - reader = _get_reader_with_filehandlers(_test_filenames['fdhsi'], reader_configs) - res = reader.load(['vis_06'], pad_data=False) + reader = _get_reader_with_filehandlers(_test_filenames["fdhsi"], reader_configs) + res = reader.load(["vis_06"], pad_data=False) - area_def = res['vis_06'].attrs['area'] + area_def = res["vis_06"].attrs["area"] # test area extents computation np.testing.assert_array_almost_equal(np.array(area_def.area_extent), np.array([-5568000.227139, -5368000.221262, diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 9ebbdb32e7..22611a8469 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -33,20 +33,20 @@ from satpy.tests.utils import make_dataid AREA_DEF = geometry.AreaDefinition( - 'mtg_fci_fdss_2km', - 'MTG FCI Full Disk Scanning Service area definition with 2 km resolution', + "mtg_fci_fdss_2km", + "MTG FCI Full Disk Scanning Service area definition with 2 km resolution", "", - {'h': 35786400., 'lon_0': 0.0, 'ellps': 'WGS84', 'proj': 'geos', 'units': 'm'}, + {"h": 35786400., "lon_0": 0.0, "ellps": "WGS84", "proj": "geos", "units": "m"}, 5568, 5568, (-5567999.9942, 5567999.9942, 5567999.9942, -5567999.9942) ) SEG_AREA_DEF = geometry.AreaDefinition( - 'mtg_fci_fdss_32km', - 'MTG FCI Full Disk Scanning Service 
area definition with 32 km resolution', + "mtg_fci_fdss_32km", + "MTG FCI Full Disk Scanning Service area definition with 32 km resolution", "", - {'h': 35786400., 'lon_0': 0.0, 'ellps': 'WGS84', 'proj': 'geos', 'units': 'm'}, + {"h": 35786400., "lon_0": 0.0, "ellps": "WGS84", "proj": "geos", "units": "m"}, 348, 348, (-5567999.9942, 5567999.9942, 5567999.9942, -5567999.9942) @@ -61,42 +61,42 @@ def setUp(self): # Easiest way to test the reader is to create a test netCDF file on the fly # Create unique filenames to prevent race conditions when tests are run in parallel self.test_file = str(uuid.uuid4()) + ".nc" - with Dataset(self.test_file, 'w') as nc: + with Dataset(self.test_file, "w") as nc: # Create dimensions - nc.createDimension('number_of_columns', 10) - nc.createDimension('number_of_rows', 100) - nc.createDimension('maximum_number_of_layers', 2) + nc.createDimension("number_of_columns", 10) + nc.createDimension("number_of_rows", 100) + nc.createDimension("maximum_number_of_layers", 2) # add global attributes - nc.data_source = 'test_data_source' - nc.platform = 'test_platform' + nc.data_source = "test_data_source" + nc.platform = "test_platform" # Add datasets - x = nc.createVariable('x', np.float32, dimensions=('number_of_columns',)) - x.standard_name = 'projection_x_coordinate' + x = nc.createVariable("x", np.float32, dimensions=("number_of_columns",)) + x.standard_name = "projection_x_coordinate" x[:] = np.arange(10) - y = nc.createVariable('y', np.float32, dimensions=('number_of_rows',)) - y.standard_name = 'projection_y_coordinate' + y = nc.createVariable("y", np.float32, dimensions=("number_of_rows",)) + y.standard_name = "projection_y_coordinate" y[:] = np.arange(100) - s = nc.createVariable('product_quality', np.int8) + s = nc.createVariable("product_quality", np.int8) s[:] = 99. - one_layer_dataset = nc.createVariable('test_one_layer', np.float32, - dimensions=('number_of_rows', 'number_of_columns')) + one_layer_dataset = nc.createVariable("test_one_layer", np.float32, + dimensions=("number_of_rows", "number_of_columns")) one_layer_dataset[:] = np.ones((100, 10)) - one_layer_dataset.test_attr = 'attr' - one_layer_dataset.units = 'test_units' + one_layer_dataset.test_attr = "attr" + one_layer_dataset.units = "test_units" - two_layers_dataset = nc.createVariable('test_two_layers', np.float32, - dimensions=('maximum_number_of_layers', - 'number_of_rows', - 'number_of_columns')) + two_layers_dataset = nc.createVariable("test_two_layers", np.float32, + dimensions=("maximum_number_of_layers", + "number_of_rows", + "number_of_columns")) two_layers_dataset[0, :, :] = np.ones((100, 10)) two_layers_dataset[1, :, :] = 2 * np.ones((100, 10)) - mtg_geos_projection = nc.createVariable('mtg_geos_projection', int, dimensions=()) + mtg_geos_projection = nc.createVariable("mtg_geos_projection", int, dimensions=()) mtg_geos_projection.longitude_of_projection_origin = 0.0 mtg_geos_projection.semi_major_axis = 6378137. 
mtg_geos_projection.inverse_flattening = 298.257223563 @@ -114,89 +114,89 @@ def tearDown(self): def test_all_basic(self): """Test all basic functionalities.""" - self.assertEqual(self.fh.spacecraft_name, 'test_platform') - self.assertEqual(self.fh.sensor_name, 'test_data_source') - self.assertEqual(self.fh.ssp_lon, 0.0) + assert self.fh.spacecraft_name == "test_platform" + assert self.fh.sensor_name == "test_data_source" + assert self.fh.ssp_lon == 0.0 global_attributes = self.fh._get_global_attributes() expected_global_attributes = { - 'filename': self.test_file, - 'spacecraft_name': 'test_platform', - 'ssp_lon': 0.0, - 'sensor': 'test_data_source', - 'platform_name': 'test_platform' + "filename": self.test_file, + "spacecraft_name": "test_platform", + "ssp_lon": 0.0, + "sensor": "test_data_source", + "platform_name": "test_platform" } - self.assertEqual(global_attributes, expected_global_attributes) + assert global_attributes == expected_global_attributes - @mock.patch('satpy.readers.fci_l2_nc.geometry.AreaDefinition') - @mock.patch('satpy.readers.fci_l2_nc.make_ext') + @mock.patch("satpy.readers.fci_l2_nc.geometry.AreaDefinition") + @mock.patch("satpy.readers.fci_l2_nc.make_ext") def test_area_definition(self, me_, gad_): """Test the area definition computation.""" - self.fh._compute_area_def(make_dataid(name='test_area_def', resolution=2000)) + self.fh._compute_area_def(make_dataid(name="test_area_def", resolution=2000)) # Asserts that the make_ext function was called with the correct arguments me_.assert_called_once() args, kwargs = me_.call_args np.testing.assert_allclose(args, [-0.0, -515.6620, 5672.28217, 0.0, 35786400.]) - proj_dict = {'a': 6378137., - 'lon_0': 0.0, - 'h': 35786400, + proj_dict = {"a": 6378137., + "lon_0": 0.0, + "h": 35786400, "rf": 298.257223563, - 'proj': 'geos', - 'units': 'm', - 'sweep': 'y'} + "proj": "geos", + "units": "m", + "sweep": "y"} # Asserts that the get_area_definition function was called with the correct arguments gad_.assert_called_once() args, kwargs = gad_.call_args - self.assertEqual(args[0], 'mtg_fci_fdss_2km') - self.assertEqual(args[1], 'MTG FCI Full Disk Scanning Service area definition with 2 km resolution') - self.assertEqual(args[2], '') - self.assertEqual(args[3], proj_dict) - self.assertEqual(args[4], 10) - self.assertEqual(args[5], 100) + assert args[0] == "mtg_fci_fdss_2km" + assert args[1] == "MTG FCI Full Disk Scanning Service area definition with 2 km resolution" + assert args[2] == "" + assert args[3] == proj_dict + assert args[4] == 10 + assert args[5] == 100 def test_dataset(self): """Test the correct execution of the get_dataset function with a valid file_key.""" - dataset = self.fh.get_dataset(make_dataid(name='test_one_layer', resolution=2000), - {'name': 'test_one_layer', - 'file_key': 'test_one_layer', - 'fill_value': -999, - 'file_type': 'test_file_type'}) + dataset = self.fh.get_dataset(make_dataid(name="test_one_layer", resolution=2000), + {"name": "test_one_layer", + "file_key": "test_one_layer", + "fill_value": -999, + "file_type": "test_file_type"}) np.testing.assert_allclose(dataset.values, np.ones((100, 10))) - self.assertEqual(dataset.attrs['test_attr'], 'attr') - self.assertEqual(dataset.attrs['units'], 'test_units') - self.assertEqual(dataset.attrs['fill_value'], -999) + assert dataset.attrs["test_attr"] == "attr" + assert dataset.attrs["units"] == "test_units" + assert dataset.attrs["fill_value"] == -999 def test_dataset_with_layer(self): """Check the correct execution of the get_dataset function with a valid 
file_key & layer.""" - dataset = self.fh.get_dataset(make_dataid(name='test_two_layers', resolution=2000), - {'name': 'test_two_layers', - 'file_key': 'test_two_layers', 'layer': 1, - 'fill_value': -999, - 'file_type': 'test_file_type'}) + dataset = self.fh.get_dataset(make_dataid(name="test_two_layers", resolution=2000), + {"name": "test_two_layers", + "file_key": "test_two_layers", "layer": 1, + "fill_value": -999, + "file_type": "test_file_type"}) np.testing.assert_allclose(dataset.values, 2 * np.ones((100, 10))) - self.assertEqual(dataset.attrs['units'], None) - self.assertEqual(dataset.attrs['spacecraft_name'], 'test_platform') + assert dataset.attrs["units"] is None + assert dataset.attrs["spacecraft_name"] == "test_platform" def test_dataset_with_invalid_filekey(self): """Test the correct execution of the get_dataset function with an invalid file_key.""" - invalid_dataset = self.fh.get_dataset(make_dataid(name='test_invalid', resolution=2000), - {'name': 'test_invalid', - 'file_key': 'test_invalid', - 'fill_value': -999, - 'file_type': 'test_file_type'}) - self.assertEqual(invalid_dataset, None) + invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", resolution=2000), + {"name": "test_invalid", + "file_key": "test_invalid", + "fill_value": -999, + "file_type": "test_file_type"}) + assert invalid_dataset is None def test_dataset_with_total_cot(self): """Test the correct execution of the get_dataset function for total COT (add contributions from two layers).""" - dataset = self.fh.get_dataset(make_dataid(name='retrieved_cloud_optical_thickness', resolution=2000), - {'name': 'retrieved_cloud_optical_thickness', - 'file_key': 'test_two_layers', - 'fill_value': -999, - 'file_type': 'test_file_type'}) + dataset = self.fh.get_dataset(make_dataid(name="retrieved_cloud_optical_thickness", resolution=2000), + {"name": "retrieved_cloud_optical_thickness", + "file_key": "test_two_layers", + "fill_value": -999, + "file_type": "test_file_type"}) # Checks that the sum of the two layer contributions is returned expected_sum = np.empty((100, 10)) expected_sum[:] = np.log10(10**2 + 10**1) @@ -205,11 +205,11 @@ def test_dataset_with_total_cot(self): def test_dataset_with_scalar(self): """Test the execution of the get_dataset function for scalar values.""" # Checks returned scalar value - dataset = self.fh.get_dataset(make_dataid(name='test_scalar'), - {'name': 'product_quality', - 'file_key': 'product_quality', - 'file_type': 'test_file_type'}) - self.assertEqual(dataset.values, 99.)
+ dataset = self.fh.get_dataset(make_dataid(name="test_scalar"), + {"name": "product_quality", + "file_key": "product_quality", + "file_type": "test_file_type"}) + assert dataset.values == 99.0 # Checks that no AreaDefinition is implemented for scalar values with pytest.raises(NotImplementedError): @@ -223,44 +223,44 @@ def setUp(self): """Set up the test by creating a test file and opening it with the reader.""" # Easiest way to test the reader is to create a test netCDF file on the fly self.seg_test_file = str(uuid.uuid4()) + ".nc" - with Dataset(self.seg_test_file, 'w') as nc: + with Dataset(self.seg_test_file, "w") as nc: # Create dimensions - nc.createDimension('number_of_FoR_cols', 348) - nc.createDimension('number_of_FoR_rows', 348) - nc.createDimension('number_of_channels', 8) - nc.createDimension('number_of_categories', 6) + nc.createDimension("number_of_FoR_cols", 348) + nc.createDimension("number_of_FoR_rows", 348) + nc.createDimension("number_of_channels", 8) + nc.createDimension("number_of_categories", 6) # add global attributes - nc.data_source = 'test_fci_data_source' - nc.platform = 'test_fci_platform' + nc.data_source = "test_fci_data_source" + nc.platform = "test_fci_platform" # Add datasets - x = nc.createVariable('x', np.float32, dimensions=('number_of_FoR_cols',)) - x.standard_name = 'projection_x_coordinate' + x = nc.createVariable("x", np.float32, dimensions=("number_of_FoR_cols",)) + x.standard_name = "projection_x_coordinate" x[:] = np.arange(348) - y = nc.createVariable('y', np.float32, dimensions=('number_of_FoR_rows',)) - y.standard_name = 'projection_y_coordinate' + y = nc.createVariable("y", np.float32, dimensions=("number_of_FoR_rows",)) + y.standard_name = "projection_y_coordinate" y[:] = np.arange(348) - s = nc.createVariable('product_quality', np.int8) + s = nc.createVariable("product_quality", np.int8) s[:] = 99.
- chans = nc.createVariable('channels', np.float32, dimensions=('number_of_channels',)) - chans.standard_name = 'fci_channels' + chans = nc.createVariable("channels", np.float32, dimensions=("number_of_channels",)) + chans.standard_name = "fci_channels" chans[:] = np.arange(8) - cats = nc.createVariable('categories', np.float32, dimensions=('number_of_categories',)) - cats.standard_name = 'product_categories' + cats = nc.createVariable("categories", np.float32, dimensions=("number_of_categories",)) + cats.standard_name = "product_categories" cats[:] = np.arange(6) - test_dataset = nc.createVariable('test_values', np.float32, - dimensions=('number_of_FoR_rows', 'number_of_FoR_cols', - 'number_of_channels', 'number_of_categories')) + test_dataset = nc.createVariable("test_values", np.float32, + dimensions=("number_of_FoR_rows", "number_of_FoR_cols", + "number_of_channels", "number_of_categories")) test_dataset[:] = self._get_unique_array(range(8), range(6)) - test_dataset.test_attr = 'attr' - test_dataset.units = 'test_units' + test_dataset.test_attr = "attr" + test_dataset.units = "test_units" def tearDown(self): """Remove the previously created test file.""" @@ -274,35 +274,35 @@ def test_all_basic(self): """Test all basic functionalities.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) - assert self.fh.spacecraft_name == 'test_fci_platform' - assert self.fh.sensor_name == 'test_fci_data_source' + assert self.fh.spacecraft_name == "test_fci_platform" + assert self.fh.sensor_name == "test_fci_data_source" assert self.fh.ssp_lon == 0.0 global_attributes = self.fh._get_global_attributes() expected_global_attributes = { - 'filename': self.seg_test_file, - 'spacecraft_name': 'test_fci_platform', - 'ssp_lon': 0.0, - 'sensor': 'test_fci_data_source', - 'platform_name': 'test_fci_platform' + "filename": self.seg_test_file, + "spacecraft_name": "test_fci_platform", + "ssp_lon": 0.0, + "sensor": "test_fci_data_source", + "platform_name": "test_fci_platform" } - self.assertEqual(global_attributes, expected_global_attributes) + assert global_attributes == expected_global_attributes def test_dataset(self): """Test the correct execution of the get_dataset function with valid file_key.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) # Checks the correct execution of the get_dataset function with a valid file_key - dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), - {'name': 'test_values', - 'file_key': 'test_values', - 'fill_value': -999, }) + dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), + {"name": "test_values", + "file_key": "test_values", + "fill_value": -999, }) expected_dataset = self._get_unique_array(range(8), range(6)) np.testing.assert_allclose(dataset.values, expected_dataset) - self.assertEqual(dataset.attrs['test_attr'], 'attr') - self.assertEqual(dataset.attrs['units'], 'test_units') - self.assertEqual(dataset.attrs['fill_value'], -999) + assert dataset.attrs["test_attr"] == "attr" + assert dataset.attrs["units"] == "test_units" + assert dataset.attrs["fill_value"] == -999 # Checks that no AreaDefinition is implemented with pytest.raises(NotImplementedError): @@ -313,12 -313,12 @@ def test_dataset_with_invalid_filekey(self): self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) # Checks the correct execution of the get_dataset function with an invalid file_key -
invalid_dataset = self.fh.get_dataset(make_dataid(name='test_invalid', resolution=32000), - {'name': 'test_invalid', - 'file_key': 'test_invalid', - 'fill_value': -999, }) + invalid_dataset = self.fh.get_dataset(make_dataid(name="test_invalid", resolution=32000), + {"name": "test_invalid", + "file_key": "test_invalid", + "fill_value": -999, }) # Checks that the function returns None - self.assertEqual(invalid_dataset, None) + assert invalid_dataset is None def test_dataset_with_adef(self): """Test the correct execution of the get_dataset function with `with_area_definition=True`.""" @@ -326,39 +326,39 @@ def test_dataset_with_adef(self): with_area_definition=True) # Checks the correct execution of the get_dataset function with a valid file_key - dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), - {'name': 'test_values', - 'file_key': 'test_values', - 'fill_value': -999, - 'coordinates': ('test_lon', 'test_lat'), }) + dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), + {"name": "test_values", + "file_key": "test_values", + "fill_value": -999, + "coordinates": ("test_lon", "test_lat"), }) expected_dataset = self._get_unique_array(range(8), range(6)) np.testing.assert_allclose(dataset.values, expected_dataset) - self.assertEqual(dataset.attrs['test_attr'], 'attr') - self.assertEqual(dataset.attrs['units'], 'test_units') - self.assertEqual(dataset.attrs['fill_value'], -999) + assert dataset.attrs["test_attr"] == "attr" + assert dataset.attrs["units"] == "test_units" + assert dataset.attrs["fill_value"] == -999 # Checks returned AreaDefinition against reference adef = self.fh.get_area_def(None) - self.assertEqual(adef, SEG_AREA_DEF) + assert adef == SEG_AREA_DEF def test_dataset_with_adef_and_wrongs_dims(self): """Test the correct execution of the get_dataset function with dims that don't match expected AreaDefinition.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}, with_area_definition=True) with pytest.raises(NotImplementedError): - self.fh.get_dataset(make_dataid(name='test_wrong_dims', resolution=6000), - {'name': 'test_wrong_dims', 'file_key': 'test_values', 'fill_value': -999} + self.fh.get_dataset(make_dataid(name="test_wrong_dims", resolution=6000), + {"name": "test_wrong_dims", "file_key": "test_values", "fill_value": -999} ) def test_dataset_with_scalar(self): """Test the execution of the get_dataset function for scalar values.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) # Checks returned scalar value - dataset = self.fh.get_dataset(make_dataid(name='test_scalar'), - {'name': 'product_quality', - 'file_key': 'product_quality', - 'file_type': 'test_file_type'}) - self.assertEqual(dataset.values, 99.) 
+ dataset = self.fh.get_dataset(make_dataid(name="test_scalar"), + {"name": "product_quality", + "file_key": "product_quality", + "file_type": "test_file_type"}) + assert dataset.values == 99.0 # Checks that no AreaDefinition is implemented for scalar values with pytest.raises(NotImplementedError): @@ -368,11 +368,11 @@ def test_dataset_slicing_catid(self): """Test the correct execution of the _slice_dataset function with 'category_id' set.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) - dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), - {'name': 'test_values', - 'file_key': 'test_values', - 'fill_value': -999, - 'category_id': 5}) + dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), + {"name": "test_values", + "file_key": "test_values", + "fill_value": -999, + "category_id": 5}) expected_dataset = self._get_unique_array(range(8), 5) np.testing.assert_allclose(dataset.values, expected_dataset) @@ -380,11 +380,11 @@ def test_dataset_slicing_chid_catid(self): """Test the correct execution of the _slice_dataset function with 'channel_id' and 'category_id' set.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) - dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), - {'name': 'test_values', - 'file_key': 'test_values', - 'fill_value': -999, - 'channel_id': 0, 'category_id': 1}) + dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), + {"name": "test_values", + "file_key": "test_values", + "fill_value": -999, + "channel_id": 0, "category_id": 1}) expected_dataset = self._get_unique_array(0, 1) np.testing.assert_allclose(dataset.values, expected_dataset) @@ -392,12 +392,12 @@ def test_dataset_slicing_visid_catid(self): """Test the correct execution of the _slice_dataset function with 'vis_channel_id' and 'category_id' set.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) - self.fh.nc = self.fh.nc.rename_dims({'number_of_channels': 'number_of_vis_channels'}) - dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), - {'name': 'test_values', - 'file_key': 'test_values', - 'fill_value': -999, - 'vis_channel_id': 3, 'category_id': 3}) + self.fh.nc = self.fh.nc.rename_dims({"number_of_channels": "number_of_vis_channels"}) + dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), + {"name": "test_values", + "file_key": "test_values", + "fill_value": -999, + "vis_channel_id": 3, "category_id": 3}) expected_dataset = self._get_unique_array(3, 3) np.testing.assert_allclose(dataset.values, expected_dataset) @@ -405,21 +405,21 @@ def test_dataset_slicing_irid(self): """Test the correct execution of the _slice_dataset function with 'ir_channel_id' set.""" self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) - self.fh.nc = self.fh.nc.rename_dims({'number_of_channels': 'number_of_ir_channels'}) - dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), - {'name': 'test_values', - 'file_key': 'test_values', - 'fill_value': -999, - 'ir_channel_id': 4}) + self.fh.nc = self.fh.nc.rename_dims({"number_of_channels": "number_of_ir_channels"}) + dataset = self.fh.get_dataset(make_dataid(name="test_values", resolution=32000), + {"name": "test_values", + "file_key": "test_values", + "fill_value": -999, + "ir_channel_id": 
4}) expected_dataset = self._get_unique_array(4, range(6)) np.testing.assert_allclose(dataset.values, expected_dataset) @staticmethod def _get_unique_array(iarr, jarr): - if not hasattr(iarr, '__iter__'): + if not hasattr(iarr, "__iter__"): iarr = [iarr] - if not hasattr(jarr, '__iter__'): + if not hasattr(jarr, "__iter__"): jarr = [jarr] array = np.zeros((348, 348, 8, 6)) @@ -440,32 +440,32 @@ def setUp(self): """Set up the test by creating a test file and opening it with the reader.""" # Easiest way to test the reader is to create a test netCDF file on the fly self.test_byte_file = str(uuid.uuid4()) + ".nc" - with Dataset(self.test_byte_file, 'w') as nc_byte: + with Dataset(self.test_byte_file, "w") as nc_byte: # Create dimensions - nc_byte.createDimension('number_of_columns', 1) - nc_byte.createDimension('number_of_rows', 1) + nc_byte.createDimension("number_of_columns", 1) + nc_byte.createDimension("number_of_rows", 1) # add global attributes - nc_byte.data_source = 'test_data_source' - nc_byte.platform = 'test_platform' + nc_byte.data_source = "test_data_source" + nc_byte.platform = "test_platform" # Add datasets - x = nc_byte.createVariable('x', np.float32, dimensions=('number_of_columns',)) - x.standard_name = 'projection_x_coordinate' + x = nc_byte.createVariable("x", np.float32, dimensions=("number_of_columns",)) + x.standard_name = "projection_x_coordinate" x[:] = np.arange(1) - y = nc_byte.createVariable('y', np.float32, dimensions=('number_of_rows',)) - x.standard_name = 'projection_y_coordinate' + y = nc_byte.createVariable("y", np.float32, dimensions=("number_of_rows",)) + x.standard_name = "projection_y_coordinate" y[:] = np.arange(1) - mtg_geos_projection = nc_byte.createVariable('mtg_geos_projection', int, dimensions=()) + mtg_geos_projection = nc_byte.createVariable("mtg_geos_projection", int, dimensions=()) mtg_geos_projection.longitude_of_projection_origin = 0.0 mtg_geos_projection.semi_major_axis = 6378137. mtg_geos_projection.inverse_flattening = 298.257223563 mtg_geos_projection.perspective_point_height = 35786400. 
- test_dataset = nc_byte.createVariable('cloud_mask_test_flag', np.float32, - dimensions=('number_of_rows', 'number_of_columns',)) + test_dataset = nc_byte.createVariable("cloud_mask_test_flag", np.float32, + dimensions=("number_of_rows", "number_of_columns",)) # This number was chosen as we know the expected byte values test_dataset[:] = 4544767 @@ -487,23 +487,23 @@ def tearDown(self): def test_byte_extraction(self): """Test the execution of the get_dataset function.""" # Value of 1 is expected to be returned for this test - dataset = self.byte_reader.get_dataset(make_dataid(name='cloud_mask_test_flag', resolution=2000), - {'name': 'cloud_mask_test_flag', - 'file_key': 'cloud_mask_test_flag', - 'fill_value': -999, - 'file_type': 'nc_fci_test_clm', - 'extract_byte': 1, + dataset = self.byte_reader.get_dataset(make_dataid(name="cloud_mask_test_flag", resolution=2000), + {"name": "cloud_mask_test_flag", + "file_key": "cloud_mask_test_flag", + "fill_value": -999, + "file_type": "nc_fci_test_clm", + "extract_byte": 1, }) - self.assertEqual(dataset.values, 1) + assert dataset.values == 1 # Value of 0 is expected to be returned for this test - dataset = self.byte_reader.get_dataset(make_dataid(name='cloud_mask_test_flag', resolution=2000), - {'name': 'cloud_mask_test_flag', - 'file_key': 'cloud_mask_test_flag', - 'fill_value': -999, 'mask_value': 0., - 'file_type': 'nc_fci_test_clm', - 'extract_byte': 23, + dataset = self.byte_reader.get_dataset(make_dataid(name="cloud_mask_test_flag", resolution=2000), + {"name": "cloud_mask_test_flag", + "file_key": "cloud_mask_test_flag", + "fill_value": -999, "mask_value": 0., + "file_type": "nc_fci_test_clm", + "extract_byte": 23, }) - self.assertEqual(dataset.values, 0) + assert dataset.values == 0 diff --git a/satpy/tests/reader_tests/test_fy4_base.py b/satpy/tests/reader_tests/test_fy4_base.py index 432117e1ad..4d0aea8c1d 100644 --- a/satpy/tests/reader_tests/test_fy4_base.py +++ b/satpy/tests/reader_tests/test_fy4_base.py @@ -30,11 +30,11 @@ class Test_FY4Base: def setup_method(self): """Initialise the tests.""" - self.p = mock.patch.object(FY4Base, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(FY4Base, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True - self.file_type = {'file_type': 'agri_l1_0500m'} + self.file_type = {"file_type": "agri_l1_0500m"} def teardown_method(self): """Stop wrapping the HDF5 file handler.""" @@ -42,19 +42,19 @@ def teardown_method(self): def test_badsensor(self): """Test case where we pass a bad sensor name, must be GHI or AGRI.""" - fy4 = FY4Base(None, {'platform_id': 'FY4A', 'instrument': 'FCI'}, self.file_type) - with pytest.raises(ValueError): + fy4 = FY4Base(None, {"platform_id": "FY4A", "instrument": "FCI"}, self.file_type) + with pytest.raises(ValueError, match="Unsupported sensor type: FCI"): fy4.calibrate_to_reflectance(None, None, None) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Error, sensor must be GHI or AGRI."): fy4.calibrate_to_bt(None, None, None) def test_badcalibration(self): """Test case where we pass a bad calibration type, radiance is not supported.""" - fy4 = FY4Base(None, {'platform_id': 'FY4A', 'instrument': 'AGRI'}, self.file_type) + fy4 = FY4Base(None, {"platform_id": "FY4A", "instrument": "AGRI"}, self.file_type) with pytest.raises(NotImplementedError): - fy4.calibrate(None, {'calibration': 'radiance'}, None, None) + fy4.calibrate(None, {"calibration": "radiance"}, None, None) def 
test_badplatform(self): """Test case where we pass a bad calibration type, radiance is not supported.""" with pytest.raises(KeyError): - FY4Base(None, {'platform_id': 'FY3D', 'instrument': 'AGRI'}, self.file_type) + FY4Base(None, {"platform_id": "FY3D", "instrument": "AGRI"}, self.file_type) diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index 1477586205..0ea143269f 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -41,12 +41,12 @@ def setUp(self): self.date = datetime(2018, 1, 1) # Create area definition - pcs_id = 'ETRS89 / LAEA Europe' + pcs_id = "ETRS89 / LAEA Europe" proj4_dict = "EPSG:3035" self.x_size = 100 self.y_size = 100 area_extent = (2426378.0132, 1528101.2618, 6293974.6215, 5446513.5222) - self.area_def = AreaDefinition('geotiff_area', pcs_id, pcs_id, + self.area_def = AreaDefinition("geotiff_area", pcs_id, pcs_id, proj4_dict, self.x_size, self.y_size, area_extent) @@ -65,56 +65,56 @@ def setUp(self): r_nan__[:10, :10] = np.nan r_nan__ = da.from_array(r_nan__, chunks=(50, 50)) - ds_l = xr.DataArray(da.stack([r__]), dims=('bands', 'y', 'x'), - attrs={'name': 'test_l', - 'start_time': self.date}) - ds_l['bands'] = ['L'] - ds_la = xr.DataArray(da.stack([r__, a__]), dims=('bands', 'y', 'x'), - attrs={'name': 'test_la', - 'start_time': self.date}) - ds_la['bands'] = ['L', 'A'] + ds_l = xr.DataArray(da.stack([r__]), dims=("bands", "y", "x"), + attrs={"name": "test_l", + "start_time": self.date}) + ds_l["bands"] = ["L"] + ds_la = xr.DataArray(da.stack([r__, a__]), dims=("bands", "y", "x"), + attrs={"name": "test_la", + "start_time": self.date}) + ds_la["bands"] = ["L", "A"] ds_rgb = xr.DataArray(da.stack([r__, g__, b__]), - dims=('bands', 'y', 'x'), - attrs={'name': 'test_rgb', - 'start_time': self.date}) - ds_rgb['bands'] = ['R', 'G', 'B'] + dims=("bands", "y", "x"), + attrs={"name": "test_rgb", + "start_time": self.date}) + ds_rgb["bands"] = ["R", "G", "B"] ds_rgba = xr.DataArray(da.stack([r__, g__, b__, a__]), - dims=('bands', 'y', 'x'), - attrs={'name': 'test_rgba', - 'start_time': self.date}) - ds_rgba['bands'] = ['R', 'G', 'B', 'A'] + dims=("bands", "y", "x"), + attrs={"name": "test_rgba", + "start_time": self.date}) + ds_rgba["bands"] = ["R", "G", "B", "A"] ds_l_nan = xr.DataArray(da.stack([r_nan__]), - dims=('bands', 'y', 'x'), - attrs={'name': 'test_l_nan', - 'start_time': self.date}) - ds_l_nan['bands'] = ['L'] + dims=("bands", "y", "x"), + attrs={"name": "test_l_nan", + "start_time": self.date}) + ds_l_nan["bands"] = ["L"] # Temp dir for the saved images self.base_dir = tempfile.mkdtemp() # Put the datasets to Scene for easy saving scn = Scene() - scn['l'] = ds_l - scn['l'].attrs['area'] = self.area_def - scn['la'] = ds_la - scn['la'].attrs['area'] = self.area_def - scn['rgb'] = ds_rgb - scn['rgb'].attrs['area'] = self.area_def - scn['rgba'] = ds_rgba - scn['rgba'].attrs['area'] = self.area_def - scn['l_nan'] = ds_l_nan - scn['l_nan'].attrs['area'] = self.area_def + scn["l"] = ds_l + scn["l"].attrs["area"] = self.area_def + scn["la"] = ds_la + scn["la"].attrs["area"] = self.area_def + scn["rgb"] = ds_rgb + scn["rgb"].attrs["area"] = self.area_def + scn["rgba"] = ds_rgba + scn["rgba"].attrs["area"] = self.area_def + scn["l_nan"] = ds_l_nan + scn["l_nan"].attrs["area"] = self.area_def # Save the images. 
Two images in PNG and two in GeoTIFF - scn.save_dataset('l', os.path.join(self.base_dir, 'test_l.png'), writer='simple_image') - scn.save_dataset('la', os.path.join(self.base_dir, '20180101_0000_test_la.png'), writer='simple_image') - scn.save_dataset('rgb', os.path.join(self.base_dir, '20180101_0000_test_rgb.tif'), writer='geotiff') - scn.save_dataset('rgba', os.path.join(self.base_dir, 'test_rgba.tif'), writer='geotiff') - scn.save_dataset('l_nan', os.path.join(self.base_dir, 'test_l_nan_fillvalue.tif'), - writer='geotiff', fill_value=0) - scn.save_dataset('l_nan', os.path.join(self.base_dir, 'test_l_nan_nofillvalue.tif'), - writer='geotiff') + scn.save_dataset("l", os.path.join(self.base_dir, "test_l.png"), writer="simple_image") + scn.save_dataset("la", os.path.join(self.base_dir, "20180101_0000_test_la.png"), writer="simple_image") + scn.save_dataset("rgb", os.path.join(self.base_dir, "20180101_0000_test_rgb.tif"), writer="geotiff") + scn.save_dataset("rgba", os.path.join(self.base_dir, "test_rgba.tif"), writer="geotiff") + scn.save_dataset("l_nan", os.path.join(self.base_dir, "test_l_nan_fillvalue.tif"), + writer="geotiff", fill_value=0) + scn.save_dataset("l_nan", os.path.join(self.base_dir, "test_l_nan_nofillvalue.tif"), + writer="geotiff") self.scn = scn @@ -130,87 +130,87 @@ def test_png_scene(self): """Test reading PNG images via satpy.Scene().""" from satpy import Scene - fname = os.path.join(self.base_dir, 'test_l.png') - scn = Scene(reader='generic_image', filenames=[fname]) - scn.load(['image']) - self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) - self.assertEqual(scn.sensor_names, {'images'}) - self.assertEqual(scn.start_time, None) - self.assertEqual(scn.end_time, None) - self.assertNotIn('area', scn['image'].attrs) - - fname = os.path.join(self.base_dir, '20180101_0000_test_la.png') - scn = Scene(reader='generic_image', filenames=[fname]) - scn.load(['image']) - data = da.compute(scn['image'].data) - self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) - self.assertEqual(scn.sensor_names, {'images'}) - self.assertEqual(scn.start_time, self.date) - self.assertEqual(scn.end_time, self.date) - self.assertNotIn('area', scn['image'].attrs) - self.assertEqual(np.sum(np.isnan(data)), 100) + fname = os.path.join(self.base_dir, "test_l.png") + scn = Scene(reader="generic_image", filenames=[fname]) + scn.load(["image"]) + assert scn["image"].shape == (1, self.y_size, self.x_size) + assert scn.sensor_names == {"images"} + assert scn.start_time is None + assert scn.end_time is None + assert "area" not in scn["image"].attrs + + fname = os.path.join(self.base_dir, "20180101_0000_test_la.png") + scn = Scene(reader="generic_image", filenames=[fname]) + scn.load(["image"]) + data = da.compute(scn["image"].data) + assert scn["image"].shape == (1, self.y_size, self.x_size) + assert scn.sensor_names == {"images"} + assert scn.start_time == self.date + assert scn.end_time == self.date + assert "area" not in scn["image"].attrs + assert np.sum(np.isnan(data)) == 100 def test_geotiff_scene(self): """Test reading TIFF images via satpy.Scene().""" from satpy import Scene - fname = os.path.join(self.base_dir, '20180101_0000_test_rgb.tif') - scn = Scene(reader='generic_image', filenames=[fname]) - scn.load(['image']) - self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size)) - self.assertEqual(scn.sensor_names, {'images'}) - self.assertEqual(scn.start_time, self.date) - self.assertEqual(scn.end_time, self.date) - self.assertEqual(scn['image'].area, 
self.area_def) - - fname = os.path.join(self.base_dir, 'test_rgba.tif') - scn = Scene(reader='generic_image', filenames=[fname]) - scn.load(['image']) - self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size)) - self.assertEqual(scn.sensor_names, {'images'}) - self.assertEqual(scn.start_time, None) - self.assertEqual(scn.end_time, None) - self.assertEqual(scn['image'].area, self.area_def) + fname = os.path.join(self.base_dir, "20180101_0000_test_rgb.tif") + scn = Scene(reader="generic_image", filenames=[fname]) + scn.load(["image"]) + assert scn["image"].shape == (3, self.y_size, self.x_size) + assert scn.sensor_names == {"images"} + assert scn.start_time == self.date + assert scn.end_time == self.date + assert scn["image"].area == self.area_def + + fname = os.path.join(self.base_dir, "test_rgba.tif") + scn = Scene(reader="generic_image", filenames=[fname]) + scn.load(["image"]) + assert scn["image"].shape == (3, self.y_size, self.x_size) + assert scn.sensor_names == {"images"} + assert scn.start_time is None + assert scn.end_time is None + assert scn["image"].area == self.area_def def test_geotiff_scene_nan(self): """Test reading TIFF images originally containing NaN values via satpy.Scene().""" from satpy import Scene - fname = os.path.join(self.base_dir, 'test_l_nan_fillvalue.tif') - scn = Scene(reader='generic_image', filenames=[fname]) - scn.load(['image']) - self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) - self.assertEqual(np.sum(scn['image'].data[0][:10, :10].compute()), 0) + fname = os.path.join(self.base_dir, "test_l_nan_fillvalue.tif") + scn = Scene(reader="generic_image", filenames=[fname]) + scn.load(["image"]) + assert scn["image"].shape == (1, self.y_size, self.x_size) + assert np.sum(scn["image"].data[0][:10, :10].compute()) == 0 - fname = os.path.join(self.base_dir, 'test_l_nan_nofillvalue.tif') - scn = Scene(reader='generic_image', filenames=[fname]) - scn.load(['image']) - self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size)) - self.assertTrue(np.all(np.isnan(scn['image'].data[0][:10, :10].compute()))) + fname = os.path.join(self.base_dir, "test_l_nan_nofillvalue.tif") + scn = Scene(reader="generic_image", filenames=[fname]) + scn.load(["image"]) + assert scn["image"].shape == (1, self.y_size, self.x_size) + assert np.all(np.isnan(scn["image"].data[0][:10, :10].compute())) def test_GenericImageFileHandler(self): """Test direct use of the reader.""" from satpy.readers.generic_image import GenericImageFileHandler - fname = os.path.join(self.base_dir, 'test_rgba.tif') - fname_info = {'start_time': self.date} + fname = os.path.join(self.base_dir, "test_rgba.tif") + fname_info = {"start_time": self.date} ftype_info = {} reader = GenericImageFileHandler(fname, fname_info, ftype_info) - foo = make_dataid(name='image') - self.assertTrue(reader.file_content) - self.assertEqual(reader.finfo['filename'], fname) - self.assertEqual(reader.finfo['start_time'], self.date) - self.assertEqual(reader.finfo['end_time'], self.date) - self.assertEqual(reader.area, self.area_def) - self.assertEqual(reader.get_area_def(None), self.area_def) - self.assertEqual(reader.start_time, self.date) - self.assertEqual(reader.end_time, self.date) + foo = make_dataid(name="image") + assert reader.file_content + assert reader.finfo["filename"] == fname + assert reader.finfo["start_time"] == self.date + assert reader.finfo["end_time"] == self.date + assert reader.area == self.area_def + assert reader.get_area_def(None) == self.area_def + assert reader.start_time 
== self.date + assert reader.end_time == self.date dataset = reader.get_dataset(foo, {}) - self.assertTrue(isinstance(dataset, xr.DataArray)) - self.assertIn('spatial_ref', dataset.coords) - self.assertTrue(np.all(np.isnan(dataset.data[:, :10, :10].compute()))) + assert isinstance(dataset, xr.DataArray) + assert "spatial_ref" in dataset.coords + assert np.all(np.isnan(dataset.data[:, :10, :10].compute())) def test_GenericImageFileHandler_masking_only_integer(self): """Test direct use of the reader.""" @@ -225,59 +225,59 @@ def __init__(self, filename, filename_info, filetype_info, file_content, **kwarg self.dataset_name = None self.file_content.update(kwargs) - data = self.scn['rgba'] + data = self.scn["rgba"] # do nothing if not integer float_data = data / 255. reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": float_data}) - self.assertIs(reader.get_dataset(make_dataid(name='image'), {}), float_data) + assert reader.get_dataset(make_dataid(name="image"), {}) is float_data # masking if integer data = data.astype(np.uint32) - self.assertEqual(data.bands.size, 4) + assert data.bands.size == 4 reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": data}) - ret_data = reader.get_dataset(make_dataid(name='image'), {}) - self.assertEqual(ret_data.bands.size, 3) + ret_data = reader.get_dataset(make_dataid(name="image"), {}) + assert ret_data.bands.size == 3 def test_GenericImageFileHandler_datasetid(self): """Test direct use of the reader.""" from satpy.readers.generic_image import GenericImageFileHandler - fname = os.path.join(self.base_dir, 'test_rgba.tif') - fname_info = {'start_time': self.date} + fname = os.path.join(self.base_dir, "test_rgba.tif") + fname_info = {"start_time": self.date} ftype_info = {} reader = GenericImageFileHandler(fname, fname_info, ftype_info) - foo = make_dataid(name='image-custom') - self.assertTrue(reader.file_content, 'file_content should be set') + foo = make_dataid(name="image-custom") + assert reader.file_content dataset = reader.get_dataset(foo, {}) - self.assertTrue(isinstance(dataset, xr.DataArray), 'dataset should be a xr.DataArray') + assert isinstance(dataset, xr.DataArray) def test_GenericImageFileHandler_nodata(self): """Test nodata handling with direct use of the reader.""" from satpy.readers.generic_image import GenericImageFileHandler - fname = os.path.join(self.base_dir, 'test_l_nan_fillvalue.tif') - fname_info = {'start_time': self.date} + fname = os.path.join(self.base_dir, "test_l_nan_fillvalue.tif") + fname_info = {"start_time": self.date} ftype_info = {} reader = GenericImageFileHandler(fname, fname_info, ftype_info) - foo = make_dataid(name='image-custom') - self.assertTrue(reader.file_content, 'file_content should be set') - info = {'nodata_handling': 'nan_mask'} + foo = make_dataid(name="image-custom") + assert reader.file_content + info = {"nodata_handling": "nan_mask"} dataset = reader.get_dataset(foo, info) - self.assertTrue(isinstance(dataset, xr.DataArray), 'dataset should be a xr.DataArray') - self.assertTrue(np.all(np.isnan(dataset.data[0][:10, :10].compute())), 'values should be np.nan') - self.assertTrue(np.isnan(dataset.attrs['_FillValue']), '_FillValue should be np.nan') + assert isinstance(dataset, xr.DataArray) + assert np.all(np.isnan(dataset.data[0][:10, :10].compute())) + assert np.isnan(dataset.attrs["_FillValue"]) - info = {'nodata_handling': 'fill_value'} + info = {"nodata_handling": "fill_value"} dataset = reader.get_dataset(foo, info) - self.assertTrue(isinstance(dataset, xr.DataArray), 'dataset 
should be a xr.DataArray') - self.assertEqual(np.sum(dataset.data[0][:10, :10].compute()), 0) - self.assertEqual(dataset.attrs['_FillValue'], 0) + assert isinstance(dataset, xr.DataArray) + assert np.sum(dataset.data[0][:10, :10].compute()) == 0 + assert dataset.attrs["_FillValue"] == 0 # default same as 'nodata_handling': 'fill_value' dataset = reader.get_dataset(foo, {}) - self.assertTrue(isinstance(dataset, xr.DataArray), 'dataset should be a xr.DataArray') - self.assertEqual(np.sum(dataset.data[0][:10, :10].compute()), 0) - self.assertEqual(dataset.attrs['_FillValue'], 0) + assert isinstance(dataset, xr.DataArray) + assert np.sum(dataset.data[0][:10, :10].compute()) == 0 + assert dataset.attrs["_FillValue"] == 0 diff --git a/satpy/tests/reader_tests/test_geocat.py b/satpy/tests/reader_tests/test_geocat.py index 91de6a4265..3263539917 100644 --- a/satpy/tests/reader_tests/test_geocat.py +++ b/satpy/tests/reader_tests/test_geocat.py @@ -43,64 +43,64 @@ class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/Platform_Name': filename_info['platform_shortname'], - '/attr/Element_Resolution': 2., - '/attr/Line_Resolution': 2., - '/attr/Subsatellite_Longitude': -70.2 if 'GOES' in filename_info['platform_shortname'] else 140.65, - 'pixel_longitude': DEFAULT_LON_DATA, - 'pixel_longitude/attr/scale_factor': 1., - 'pixel_longitude/attr/add_offset': 0., - 'pixel_longitude/shape': DEFAULT_FILE_SHAPE, - 'pixel_longitude/attr/_FillValue': np.nan, - 'pixel_latitude': DEFAULT_LAT_DATA, - 'pixel_latitude/attr/scale_factor': 1., - 'pixel_latitude/attr/add_offset': 0., - 'pixel_latitude/shape': DEFAULT_FILE_SHAPE, - 'pixel_latitude/attr/_FillValue': np.nan, + "/attr/Platform_Name": filename_info["platform_shortname"], + "/attr/Element_Resolution": 2., + "/attr/Line_Resolution": 2., + "/attr/Subsatellite_Longitude": -70.2 if "GOES" in filename_info["platform_shortname"] else 140.65, + "pixel_longitude": DEFAULT_LON_DATA, + "pixel_longitude/attr/scale_factor": 1., + "pixel_longitude/attr/add_offset": 0., + "pixel_longitude/shape": DEFAULT_FILE_SHAPE, + "pixel_longitude/attr/_FillValue": np.nan, + "pixel_latitude": DEFAULT_LAT_DATA, + "pixel_latitude/attr/scale_factor": 1., + "pixel_latitude/attr/add_offset": 0., + "pixel_latitude/shape": DEFAULT_FILE_SHAPE, + "pixel_latitude/attr/_FillValue": np.nan, } sensor = { - 'HIMAWARI-8': 'himawari8', - 'GOES-17': 'goesr', - 'GOES-16': 'goesr', - 'GOES-13': 'goes', - 'GOES-14': 'goes', - 'GOES-15': 'goes', - }[filename_info['platform_shortname']] - file_content['/attr/Sensor_Name'] = sensor - - if filename_info['platform_shortname'] == 'HIMAWARI-8': - file_content['pixel_longitude'] = DEFAULT_LON_DATA + 130. - - file_content['variable1'] = DEFAULT_FILE_DATA.astype(np.float32) - file_content['variable1/attr/_FillValue'] = -1 - file_content['variable1/attr/scale_factor'] = 1. - file_content['variable1/attr/add_offset'] = 0. - file_content['variable1/attr/units'] = '1' - file_content['variable1/shape'] = DEFAULT_FILE_SHAPE + "HIMAWARI-8": "himawari8", + "GOES-17": "goesr", + "GOES-16": "goesr", + "GOES-13": "goes", + "GOES-14": "goes", + "GOES-15": "goes", + }[filename_info["platform_shortname"]] + file_content["/attr/Sensor_Name"] = sensor + + if filename_info["platform_shortname"] == "HIMAWARI-8": + file_content["pixel_longitude"] = DEFAULT_LON_DATA + 130. 
+ + file_content["variable1"] = DEFAULT_FILE_DATA.astype(np.float32) + file_content["variable1/attr/_FillValue"] = -1 + file_content["variable1/attr/scale_factor"] = 1. + file_content["variable1/attr/add_offset"] = 0. + file_content["variable1/attr/units"] = "1" + file_content["variable1/shape"] = DEFAULT_FILE_SHAPE # data with fill values - file_content['variable2'] = np.ma.masked_array( + file_content["variable2"] = np.ma.masked_array( DEFAULT_FILE_DATA.astype(np.float32), mask=np.zeros_like(DEFAULT_FILE_DATA)) - file_content['variable2'].mask[::5, ::5] = True - file_content['variable2/attr/_FillValue'] = -1 - file_content['variable2/attr/scale_factor'] = 1. - file_content['variable2/attr/add_offset'] = 0. - file_content['variable2/attr/units'] = '1' - file_content['variable2/shape'] = DEFAULT_FILE_SHAPE + file_content["variable2"].mask[::5, ::5] = True + file_content["variable2/attr/_FillValue"] = -1 + file_content["variable2/attr/scale_factor"] = 1. + file_content["variable2/attr/add_offset"] = 0. + file_content["variable2/attr/units"] = "1" + file_content["variable2/shape"] = DEFAULT_FILE_SHAPE # category - file_content['variable3'] = DEFAULT_FILE_DATA.astype(np.byte) - file_content['variable3/attr/_FillValue'] = -128 - file_content['variable3/attr/flag_meanings'] = "clear water supercooled mixed ice unknown" - file_content['variable3/attr/flag_values'] = [0, 1, 2, 3, 4, 5] - file_content['variable3/attr/units'] = '1' - file_content['variable3/shape'] = DEFAULT_FILE_SHAPE - - attrs = ('_FillValue', 'flag_meanings', 'flag_values', 'units') + file_content["variable3"] = DEFAULT_FILE_DATA.astype(np.byte) + file_content["variable3/attr/_FillValue"] = -128 + file_content["variable3/attr/flag_meanings"] = "clear water supercooled mixed ice unknown" + file_content["variable3/attr/flag_values"] = [0, 1, 2, 3, 4, 5] + file_content["variable3/attr/units"] = "1" + file_content["variable3/shape"] = DEFAULT_FILE_SHAPE + + attrs = ("_FillValue", "flag_meanings", "flag_values", "units") convert_file_content_to_data_array( file_content, attrs=attrs, - dims=('z', 'lines', 'elements')) + dims=("z", "lines", "elements")) return file_content @@ -113,9 +113,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.geocat import GEOCATFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(GEOCATFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) + self.p = mock.patch.object(GEOCATFileHandler, "__bases__", (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -128,7 +128,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'geocatL2.GOES-13.2015143.234500.nc', + "geocatL2.GOES-13.2015143.234500.nc", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) @@ -140,10 +140,10 @@ def test_init_with_kwargs(self): from satpy.readers import load_reader r = load_reader(self.reader_configs, xarray_kwargs={"decode_times": True}) loadables = r.select_files_from_pathnames([ - 'geocatL2.GOES-13.2015143.234500.nc', + "geocatL2.GOES-13.2015143.234500.nc", ]) assert len(loadables) == 1 - r.create_filehandlers(loadables, fh_kwargs={"xarray_kwargs": 
{'decode_times': True}}) + r.create_filehandlers(loadables, fh_kwargs={"xarray_kwargs": {"decode_times": True}}) # make sure we have some files assert r.file_handlers @@ -153,19 +153,19 @@ def test_load_all_old_goes(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.geocat.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'geocatL2.GOES-13.2015143.234500.nc', + "geocatL2.GOES-13.2015143.234500.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['variable1', - 'variable2', - 'variable3']) - self.assertEqual(len(datasets), 3) + datasets = r.load(["variable1", + "variable2", + "variable3"]) + assert len(datasets) == 3 for v in datasets.values(): - assert 'calibration' not in v.attrs - self.assertEqual(v.attrs['units'], '1') - self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) + assert "calibration" not in v.attrs + assert v.attrs["units"] == "1" + assert datasets["variable3"].attrs.get("flag_meanings") is not None def test_load_all_himawari8(self): """Test loading all test datasets from H8 NetCDF file.""" @@ -174,20 +174,20 @@ def test_load_all_himawari8(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.geocat.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'geocatL2.HIMAWARI-8.2017092.210730.R304.R20.nc', + "geocatL2.HIMAWARI-8.2017092.210730.R304.R20.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['variable1', - 'variable2', - 'variable3']) - self.assertEqual(len(datasets), 3) + datasets = r.load(["variable1", + "variable2", + "variable3"]) + assert len(datasets) == 3 for v in datasets.values(): - assert 'calibration' not in v.attrs - self.assertEqual(v.attrs['units'], '1') - self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) - self.assertIsInstance(datasets['variable1'].attrs['area'], AreaDefinition) + assert "calibration" not in v.attrs + assert v.attrs["units"] == "1" + assert datasets["variable3"].attrs.get("flag_meanings") is not None + assert isinstance(datasets["variable1"].attrs["area"], AreaDefinition) def test_load_all_goes17_hdf4(self): """Test loading all test datasets from GOES-17 HDF4 file.""" @@ -196,17 +196,17 @@ def test_load_all_goes17_hdf4(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.geocat.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'geocatL2.GOES-17.CONUS.2020041.163130.hdf', + "geocatL2.GOES-17.CONUS.2020041.163130.hdf", ]) r.create_filehandlers(loadables) - datasets = r.load(['variable1', - 'variable2', - 'variable3']) - self.assertEqual(len(datasets), 3) + datasets = r.load(["variable1", + "variable2", + "variable3"]) + assert len(datasets) == 3 for v in datasets.values(): - assert 'calibration' not in v.attrs - self.assertEqual(v.attrs['units'], '1') - self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings')) - self.assertIsInstance(datasets['variable1'].attrs['area'], AreaDefinition) + assert "calibration" not in v.attrs + assert v.attrs["units"] == "1" + assert datasets["variable3"].attrs.get("flag_meanings") is not None + assert isinstance(datasets["variable1"].attrs["area"], 
AreaDefinition) diff --git a/satpy/tests/reader_tests/test_geos_area.py b/satpy/tests/reader_tests/test_geos_area.py index d7d4c2a510..fb0bb6f19b 100644 --- a/satpy/tests/reader_tests/test_geos_area.py +++ b/satpy/tests/reader_tests/test_geos_area.py @@ -37,58 +37,58 @@ class TestGEOSProjectionUtil(unittest.TestCase): def make_pdict_ext(self, typ, scan): """Create a dictionary and extents to use in testing.""" if typ == 1: # Fulldisk - pdict = {'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, - 'ssp_lon': 0.0, - 'nlines': 3712, - 'ncols': 3712, - 'a_name': 'geostest', - 'a_desc': 'test area', - 'p_id': 'test_area', - 'cfac': -13642337, - 'lfac': -13642337, - 'coff': 1856} - if scan == 'N2S': - pdict['scandir'] = 'N2S' - pdict['loff'] = 1856 + pdict = {"a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, + "ssp_lon": 0.0, + "nlines": 3712, + "ncols": 3712, + "a_name": "geostest", + "a_desc": "test area", + "p_id": "test_area", + "cfac": -13642337, + "lfac": -13642337, + "coff": 1856} + if scan == "N2S": + pdict["scandir"] = "N2S" + pdict["loff"] = 1856 extent = (5567248.28340708, 5567248.28340708, -5570248.686685662, -5570248.686685662) - if scan == 'S2N': - pdict['scandir'] = 'S2N' - pdict['loff'] = -1856 + if scan == "S2N": + pdict["scandir"] = "S2N" + pdict["loff"] = -1856 extent = (5567248.28340708, 5570248.686685662, -5570248.686685662, -5567248.28340708) if typ == 2: # One sector - pdict = {'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, - 'ssp_lon': 0.0, - 'nlines': 464, - 'ncols': 3712, - 'a_name': 'geostest', - 'a_desc': 'test area', - 'p_id': 'test_area', - 'cfac': -13642337, - 'lfac': -13642337, - 'coff': 1856} - if scan == 'N2S': - pdict['scandir'] = 'N2S' - pdict['loff'] = 464 + pdict = {"a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, + "ssp_lon": 0.0, + "nlines": 464, + "ncols": 3712, + "a_name": "geostest", + "a_desc": "test area", + "p_id": "test_area", + "cfac": -13642337, + "lfac": -13642337, + "coff": 1856} + if scan == "N2S": + pdict["scandir"] = "N2S" + pdict["loff"] = 464 extent = (5567248.28340708, 1390686.9196223018, -5570248.686685662, -1500.2016392905093) - if scan == 'S2N': - pdict['scandir'] = 'S2N' - pdict['loff'] = 464 + if scan == "S2N": + pdict["scandir"] = "S2N" + pdict["loff"] = 464 extent = (5567248.28340708, -1390686.9196223018, -5570248.686685662, @@ -99,39 +99,39 @@ def make_pdict_ext(self, typ, scan): def test_geos_area(self): """Test area extent calculation with N->S scan then S->N scan.""" # North -> South full disk - pdict, extent = self.make_pdict_ext(1, 'N2S') + pdict, extent = self.make_pdict_ext(1, "N2S") aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # South -> North full disk - pdict, extent = self.make_pdict_ext(1, 'S2N') + pdict, extent = self.make_pdict_ext(1, "S2N") aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # North -> South one sector - pdict, extent = self.make_pdict_ext(2, 'N2S') + pdict, extent = self.make_pdict_ext(2, "N2S") aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) # South -> North one sector - pdict, extent = self.make_pdict_ext(2, 'S2N') + pdict, extent = self.make_pdict_ext(2, "S2N") aex = get_area_extent(pdict) np.testing.assert_allclose(aex, extent) def test_get_xy_from_linecol(self): """Test the scan angle calculation.""" - pdict, extent = self.make_pdict_ext(1, 'S2N') + pdict, extent = self.make_pdict_ext(1, "S2N") good_xy = [0.2690166648133674, -10.837528496767087] - factors = (pdict['lfac'], pdict['cfac']) - offsets = 
(pdict['loff'], pdict['coff']) + factors = (pdict["lfac"], pdict["cfac"]) + offsets = (pdict["loff"], pdict["coff"]) x, y = get_xy_from_linecol(400, 1800, offsets, factors) np.testing.assert_approx_equal(x, good_xy[0]) np.testing.assert_approx_equal(y, good_xy[1]) - pdict, extent = self.make_pdict_ext(2, 'N2S') + pdict, extent = self.make_pdict_ext(2, "N2S") good_xy = [0.2690166648133674, 0.30744761692956274] - factors = (pdict['lfac'], pdict['cfac']) - offsets = (pdict['loff'], pdict['coff']) + factors = (pdict["lfac"], pdict["cfac"]) + offsets = (pdict["loff"], pdict["coff"]) x, y = get_xy_from_linecol(400, 1800, offsets, factors) np.testing.assert_approx_equal(x, good_xy[0]) np.testing.assert_approx_equal(y, good_xy[1]) @@ -139,18 +139,18 @@ def test_get_xy_from_linecol(self): def test_get_area_definition(self): """Test the retrieval of the area definition.""" from pyresample.utils import proj4_radius_parameters - pdict, extent = self.make_pdict_ext(1, 'N2S') + pdict, extent = self.make_pdict_ext(1, "N2S") good_res = (-3000.4032785810186, -3000.4032785810186) a_def = get_area_definition(pdict, extent) - self.assertEqual(a_def.area_id, pdict['a_name']) - self.assertEqual(a_def.resolution, good_res) - self.assertEqual(a_def.proj_dict['proj'], 'geos') - self.assertEqual(a_def.proj_dict['units'], 'm') + assert a_def.area_id == pdict["a_name"] + assert a_def.resolution == good_res + assert a_def.proj_dict["proj"] == "geos" + assert a_def.proj_dict["units"] == "m" a, b = proj4_radius_parameters(a_def.proj_dict) - self.assertEqual(a, 6378169) - self.assertEqual(b, 6356583.8) - self.assertEqual(a_def.proj_dict['h'], 35785831) + assert a == 6378169 + assert b == 6356583.8 + assert a_def.proj_dict["h"] == 35785831 def test_sampling_to_lfac_cfac(self): """Test conversion from angular sampling to line/column offset.""" @@ -160,26 +160,25 @@ def test_sampling_to_lfac_cfac(self): def test_get_geos_area_naming(self): """Test the geos area naming function.""" - input_dict = {'platform_name': 'testplatform', - 'instrument_name': 'testinstrument', - 'resolution': 1000, - 'service_name': 'testservicename', - 'service_desc': 'testdesc'} + input_dict = {"platform_name": "testplatform", + "instrument_name": "testinstrument", + "resolution": 1000, + "service_name": "testservicename", + "service_desc": "testdesc"} output_dict = get_geos_area_naming(input_dict) - self.assertEqual(output_dict['area_id'], 'testplatform_testinstrument_testservicename_1km') - self.assertEqual(output_dict['description'], 'TESTPLATFORM TESTINSTRUMENT testdesc area definition' - ' with 1 km resolution') + assert output_dict["area_id"] == "testplatform_testinstrument_testservicename_1km" + assert output_dict["description"] == "TESTPLATFORM TESTINSTRUMENT testdesc area definition with 1 km resolution" def test_get_resolution_and_unit_strings_in_km(self): """Test the resolution and unit strings function for a km resolution.""" out = get_resolution_and_unit_strings(1000) - self.assertEqual(out['value'], '1') - self.assertEqual(out['unit'], 'km') + assert out["value"] == "1" + assert out["unit"] == "km" def test_get_resolution_and_unit_strings_in_m(self): """Test the resolution and unit strings function for a m resolution.""" out = get_resolution_and_unit_strings(500) - self.assertEqual(out['value'], '500') - self.assertEqual(out['unit'], 'm') + assert out["value"] == "500" + assert out["unit"] == "m" diff --git a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py index eb06362831..0333f3df2b 
100644 --- a/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py +++ b/satpy/tests/reader_tests/test_gerb_l2_hr_h5.py @@ -37,10 +37,10 @@ def make_h5_null_string(length): def write_h5_null_string_att(loc_id, name, s): """Write a NULL terminated string attribute at loc_id.""" dt = make_h5_null_string(length=7) - name = bytes(name.encode('ascii')) - s = bytes(s.encode('ascii')) + name = bytes(name.encode("ascii")) + s = bytes(s.encode("ascii")) at = h5py.h5a.create(loc_id, name, dt, h5py.h5s.create(h5py.h5s.SCALAR)) - at.write(np.array(s, dtype=f'|S{len(s)+1}')) + at.write(np.array(s, dtype=f"|S{len(s)+1}")) @pytest.fixture(scope="session") @@ -48,74 +48,74 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): """Create a dummy HDF5 file for the GERB L2 HR product.""" filename = tmp_path_factory.mktemp("data") / FNAME - with h5py.File(filename, 'w') as fid: - fid.create_group('/Angles') - fid['/Angles/Relative Azimuth'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Angles/Relative Azimuth'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - fid['/Angles/Solar Zenith'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Angles/Solar Zenith'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - write_h5_null_string_att(fid['/Angles/Relative Azimuth'].id, 'Unit', 'Degree') - fid['/Angles/Viewing Azimuth'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Angles/Viewing Azimuth'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - write_h5_null_string_att(fid['/Angles/Viewing Azimuth'].id, 'Unit', 'Degree') - fid['/Angles/Viewing Zenith'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Angles/Viewing Zenith'].attrs['Quantisation Factor'] = np.array(0.1, dtype='float64') - write_h5_null_string_att(fid['/Angles/Viewing Zenith'].id, 'Unit', 'Degree') - fid.create_group('/GERB') + with h5py.File(filename, "w") as fid: + fid.create_group("/Angles") + fid["/Angles/Relative Azimuth"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Angles/Relative Azimuth"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") + fid["/Angles/Solar Zenith"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Angles/Solar Zenith"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") + write_h5_null_string_att(fid["/Angles/Relative Azimuth"].id, "Unit", "Degree") + fid["/Angles/Viewing Azimuth"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Angles/Viewing Azimuth"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") + write_h5_null_string_att(fid["/Angles/Viewing Azimuth"].id, "Unit", "Degree") + fid["/Angles/Viewing Zenith"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Angles/Viewing Zenith"].attrs["Quantisation Factor"] = np.array(0.1, dtype="float64") + write_h5_null_string_att(fid["/Angles/Viewing Zenith"].id, "Unit", "Degree") + fid.create_group("/GERB") dt = h5py.h5t.TypeID.copy(h5py.h5t.C_S1) dt.set_size(3) dt.set_strpad(h5py.h5t.STR_NULLTERM) - write_h5_null_string_att(fid['/GERB'].id, 'Instrument Identifier', 'G4') - fid.create_group('/GGSPS') - fid['/GGSPS'].attrs['L1.5 NANRG Product Version'] = np.array(-1, dtype='int32') - fid.create_group('/Geolocation') - write_h5_null_string_att(fid['/Geolocation'].id, 'Geolocation File Name', - 'G4_SEV4_L20_HR_GEO_20180111_181500_V010.hdf') - fid['/Geolocation'].attrs['Nominal Satellite Longitude (degrees)'] = np.array(0.0, dtype='float64') - fid.create_group('/Imager') - fid['/Imager'].attrs['Instrument Identifier'] = 
np.array(4, dtype='int32') - write_h5_null_string_att(fid['/Imager'].id, 'Type', 'SEVIRI') - fid.create_group('/RMIB') - fid.create_group('/Radiometry') - fid['/Radiometry'].attrs['SEVIRI Radiance Definition Flag'] = np.array(2, dtype='int32') - fid['/Radiometry/A Values (per GERB detector cell)'] = np.ones(shape=(256,), dtype=np.dtype('>f8')) - fid['/Radiometry/C Values (per GERB detector cell)'] = np.ones(shape=(256,), dtype=np.dtype('>f8')) - fid['/Radiometry/Longwave Correction'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Longwave Correction'].attrs['Offset'] = np.array(1.0, dtype='float64') - fid['/Radiometry/Longwave Correction'].attrs['Quantisation Factor'] = np.array(0.005, dtype='float64') - fid['/Radiometry/Shortwave Correction'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Shortwave Correction'].attrs['Offset'] = np.array(1.0, dtype='float64') - fid['/Radiometry/Shortwave Correction'].attrs['Quantisation Factor'] = np.array(0.005, dtype='float64') - fid['/Radiometry/Solar Flux'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Solar Flux'].attrs['Quantisation Factor'] = np.array(0.25, dtype='float64') - write_h5_null_string_att(fid['/Radiometry/Solar Flux'].id, 'Unit', 'Watt per square meter') - fid['/Radiometry/Solar Radiance'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Solar Radiance'].attrs['Quantisation Factor'] = np.array(0.05, dtype='float64') - write_h5_null_string_att(fid['/Radiometry/Solar Radiance'].id, 'Unit', 'Watt per square meter per steradian') - fid['/Radiometry/Thermal Flux'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Thermal Flux'].attrs['Quantisation Factor'] = np.array(0.25, dtype='float64') - write_h5_null_string_att(fid['/Radiometry/Thermal Flux'].id, 'Unit', 'Watt per square meter') - fid['/Radiometry/Thermal Radiance'] = np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Radiometry/Thermal Radiance'].attrs['Quantisation Factor'] = np.array(0.05, dtype='float64') - write_h5_null_string_att(fid['/Radiometry/Thermal Radiance'].id, 'Unit', 'Watt per square meter per steradian') - fid.create_group('/Scene Identification') - write_h5_null_string_att(fid['/Scene Identification'].id, - 'Solar Angular Dependency Models Set Version', 'CERES_TRMM.1') - write_h5_null_string_att(fid['/Scene Identification'].id, - 'Thermal Angular Dependency Models Set Version', 'RMIB.3') - fid['/Scene Identification/Cloud Cover'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) - fid['/Scene Identification/Cloud Cover'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') - write_h5_null_string_att(fid['/Scene Identification/Cloud Cover'].id, 'Unit', 'Percent') - fid['/Scene Identification/Cloud Optical Depth (logarithm)'] = \ - np.ones(shape=(1237, 1237), dtype=np.dtype('>i2')) - fid['/Scene Identification/Cloud Optical Depth (logarithm)'].attrs['Quantisation Factor'] = \ - np.array(0.00025, dtype='float64') - fid['/Scene Identification/Cloud Phase'] = np.ones(shape=(1237, 1237), dtype=np.dtype('uint8')) - fid['/Scene Identification/Cloud Phase'].attrs['Quantisation Factor'] = np.array(0.01, dtype='float64') - write_h5_null_string_att(fid['/Scene Identification/Cloud Phase'].id, 'Unit', - 'Percent (Water=0%,Mixed,Ice=100%)') - fid.create_group('/Times') - fid['/Times/Time (per row)'] = np.ones(shape=(1237,), dtype=np.dtype('|S22')) + write_h5_null_string_att(fid["/GERB"].id, "Instrument Identifier", "G4") + 
fid.create_group("/GGSPS") + fid["/GGSPS"].attrs["L1.5 NANRG Product Version"] = np.array(-1, dtype="int32") + fid.create_group("/Geolocation") + write_h5_null_string_att(fid["/Geolocation"].id, "Geolocation File Name", + "G4_SEV4_L20_HR_GEO_20180111_181500_V010.hdf") + fid["/Geolocation"].attrs["Nominal Satellite Longitude (degrees)"] = np.array(0.0, dtype="float64") + fid.create_group("/Imager") + fid["/Imager"].attrs["Instrument Identifier"] = np.array(4, dtype="int32") + write_h5_null_string_att(fid["/Imager"].id, "Type", "SEVIRI") + fid.create_group("/RMIB") + fid.create_group("/Radiometry") + fid["/Radiometry"].attrs["SEVIRI Radiance Definition Flag"] = np.array(2, dtype="int32") + fid["/Radiometry/A Values (per GERB detector cell)"] = np.ones(shape=(256,), dtype=np.dtype(">f8")) + fid["/Radiometry/C Values (per GERB detector cell)"] = np.ones(shape=(256,), dtype=np.dtype(">f8")) + fid["/Radiometry/Longwave Correction"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Longwave Correction"].attrs["Offset"] = np.array(1.0, dtype="float64") + fid["/Radiometry/Longwave Correction"].attrs["Quantisation Factor"] = np.array(0.005, dtype="float64") + fid["/Radiometry/Shortwave Correction"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Shortwave Correction"].attrs["Offset"] = np.array(1.0, dtype="float64") + fid["/Radiometry/Shortwave Correction"].attrs["Quantisation Factor"] = np.array(0.005, dtype="float64") + fid["/Radiometry/Solar Flux"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Solar Flux"].attrs["Quantisation Factor"] = np.array(0.25, dtype="float64") + write_h5_null_string_att(fid["/Radiometry/Solar Flux"].id, "Unit", "Watt per square meter") + fid["/Radiometry/Solar Radiance"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Solar Radiance"].attrs["Quantisation Factor"] = np.array(0.05, dtype="float64") + write_h5_null_string_att(fid["/Radiometry/Solar Radiance"].id, "Unit", "Watt per square meter per steradian") + fid["/Radiometry/Thermal Flux"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Thermal Flux"].attrs["Quantisation Factor"] = np.array(0.25, dtype="float64") + write_h5_null_string_att(fid["/Radiometry/Thermal Flux"].id, "Unit", "Watt per square meter") + fid["/Radiometry/Thermal Radiance"] = np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Radiometry/Thermal Radiance"].attrs["Quantisation Factor"] = np.array(0.05, dtype="float64") + write_h5_null_string_att(fid["/Radiometry/Thermal Radiance"].id, "Unit", "Watt per square meter per steradian") + fid.create_group("/Scene Identification") + write_h5_null_string_att(fid["/Scene Identification"].id, + "Solar Angular Dependency Models Set Version", "CERES_TRMM.1") + write_h5_null_string_att(fid["/Scene Identification"].id, + "Thermal Angular Dependency Models Set Version", "RMIB.3") + fid["/Scene Identification/Cloud Cover"] = np.ones(shape=(1237, 1237), dtype=np.dtype("uint8")) + fid["/Scene Identification/Cloud Cover"].attrs["Quantisation Factor"] = np.array(0.01, dtype="float64") + write_h5_null_string_att(fid["/Scene Identification/Cloud Cover"].id, "Unit", "Percent") + fid["/Scene Identification/Cloud Optical Depth (logarithm)"] = \ + np.ones(shape=(1237, 1237), dtype=np.dtype(">i2")) + fid["/Scene Identification/Cloud Optical Depth (logarithm)"].attrs["Quantisation Factor"] = \ + np.array(0.00025, dtype="float64") + fid["/Scene Identification/Cloud Phase"] = np.ones(shape=(1237, 1237), 
dtype=np.dtype("uint8")) + fid["/Scene Identification/Cloud Phase"].attrs["Quantisation Factor"] = np.array(0.01, dtype="float64") + write_h5_null_string_att(fid["/Scene Identification/Cloud Phase"].id, "Unit", + "Percent (Water=0%,Mixed,Ice=100%)") + fid.create_group("/Times") + fid["/Times/Time (per row)"] = np.ones(shape=(1237,), dtype=np.dtype("|S22")) return filename @@ -123,7 +123,7 @@ def gerb_l2_hr_h5_dummy_file(tmp_path_factory): @pytest.mark.parametrize("name", ["Solar Flux", "Thermal Flux", "Solar Radiance", "Thermal Radiance"]) def test_dataset_load(gerb_l2_hr_h5_dummy_file, name): """Test loading the solar flux component.""" - scene = Scene(reader='gerb_l2_hr_h5', filenames=[gerb_l2_hr_h5_dummy_file]) + scene = Scene(reader="gerb_l2_hr_h5", filenames=[gerb_l2_hr_h5_dummy_file]) scene.load([name]) assert scene[name].shape == (1237, 1237) assert np.nanmax((scene[name].to_numpy().flatten() - 0.25)) < 1e-6 diff --git a/satpy/tests/reader_tests/test_ghi_l1.py b/satpy/tests/reader_tests/test_ghi_l1.py index 79667ef37d..2b6ff4af54 100644 --- a/satpy/tests/reader_tests/test_ghi_l1.py +++ b/satpy/tests/reader_tests/test_ghi_l1.py @@ -33,7 +33,7 @@ CHANNELS_BY_RESOLUTION = {250: ["C01"], 500: ["C01", "C02", "C03", "C04", "C05", "C06"], 2000: ALL_BAND_NAMES, - 'GEO': 'solar_azimuth_angle' + "GEO": "solar_azimuth_angle" } AREA_EXTENTS_BY_RESOLUTION = { @@ -48,58 +48,58 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): def make_test_data(self, cwl, ch, prefix, dims, file_type): """Make test data.""" - if prefix == 'CAL': + if prefix == "CAL": data = xr.DataArray( da.from_array((np.arange(10.) + 1.) / 10., [dims[0] * dims[1]]), attrs={ - 'Slope': np.array(1.), 'Intercept': np.array(0.), - 'FillValue': np.array(-65535.0), - 'units': 'NUL', - 'center_wavelength': '{}um'.format(cwl).encode('utf-8'), - 'band_names': 'band{}(band number is range from 1 to 14)' - .format(ch).encode('utf-8'), - 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'), - 'valid_range': np.array([0, 1.5]), + "Slope": np.array(1.), "Intercept": np.array(0.), + "FillValue": np.array(-65535.0), + "units": "NUL", + "center_wavelength": "{}um".format(cwl).encode("utf-8"), + "band_names": "band{}(band number is range from 1 to 14)" + .format(ch).encode("utf-8"), + "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), + "valid_range": np.array([0, 1.5]), }, - dims='_const') + dims="_const") - elif prefix == 'NOM': + elif prefix == "NOM": data = xr.DataArray( da.from_array(np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1, [dim for dim in dims]), attrs={ - 'Slope': np.array(1.), 'Intercept': np.array(0.), - 'FillValue': np.array(65535), - 'units': 'DN', - 'center_wavelength': '{}um'.format(cwl).encode('utf-8'), - 'band_names': 'band{}(band number is range from 1 to 7)' - .format(ch).encode('utf-8'), - 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'), - 'valid_range': np.array([0, 4095]), + "Slope": np.array(1.), "Intercept": np.array(0.), + "FillValue": np.array(65535), + "units": "DN", + "center_wavelength": "{}um".format(cwl).encode("utf-8"), + "band_names": "band{}(band number is range from 1 to 7)" + .format(ch).encode("utf-8"), + "long_name": "Calibration table of {}um Channel".format(cwl).encode("utf-8"), + "valid_range": np.array([0, 4095]), }, - dims=('_RegLength', '_RegWidth')) + dims=("_RegLength", "_RegWidth")) - elif prefix == 'GEO': + elif prefix == "GEO": data = xr.DataArray( da.from_array(np.arange(10, dtype=np.float32).reshape((2, 5)) 
+ 1, [dim for dim in dims]), attrs={ - 'Slope': np.array(1.), 'Intercept': np.array(0.), - 'FillValue': np.array(65535.), - 'units': 'NUL', - 'band_names': 'NUL', - 'valid_range': np.array([0., 360.]), + "Slope": np.array(1.), "Intercept": np.array(0.), + "FillValue": np.array(65535.), + "units": "NUL", + "band_names": "NUL", + "valid_range": np.array([0., 360.]), }, - dims=('_RegLength', '_RegWidth')) + dims=("_RegLength", "_RegWidth")) - elif prefix == 'COEF': - if file_type == '250': + elif prefix == "COEF": + if file_type == "250": data = self._create_coeff_array(1) - elif file_type == '500': + elif file_type == "500": data = self._create_coeff_array(6) - elif file_type == '2000': + elif file_type == "2000": data = self._create_coeff_array(7) return data @@ -109,14 +109,14 @@ def _create_coeff_array(self, nb_channels): da.from_array((np.arange(nb_channels * 2).reshape((nb_channels, 2)) + 1.) / np.array([1E4, 1E2]), [nb_channels, 2]), attrs={ - 'Slope': 1., 'Intercept': 0., - 'FillValue': 0, - 'units': 'NUL', - 'band_names': 'NUL', - 'long_name': b'Calibration coefficient (SCALE and OFFSET)', - 'valid_range': [-500, 500], + "Slope": 1., "Intercept": 0., + "FillValue": 0, + "units": "NUL", + "band_names": "NUL", + "long_name": b"Calibration coefficient (SCALE and OFFSET)", + "valid_range": [-500, 500], }, - dims=('_num_channel', '_coefs')) + dims=("_num_channel", "_coefs")) return data def _create_channel_data(self, chs, cwls, file_type): @@ -124,11 +124,11 @@ def _create_channel_data(self, chs, cwls, file_type): dim_1 = 5 data = {} for index, _cwl in enumerate(cwls): - data['Calibration/CALChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'CAL', + data["Calibration/CALChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "CAL", [dim_0, dim_1], file_type) - data['Data/NOMChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'NOM', + data["Data/NOMChannel" + "%02d" % chs[index]] = self.make_test_data(cwls[index], chs[index], "NOM", [dim_0, dim_1], file_type) - data['Calibration/CALIBRATION_COEF(SCALE+OFFSET)'] = self.make_test_data(cwls[index], chs[index], 'COEF', + data["Calibration/CALIBRATION_COEF(SCALE+OFFSET)"] = self.make_test_data(cwls[index], chs[index], "COEF", [dim_0, dim_1], file_type) return data @@ -156,39 +156,39 @@ def _get_2km_data(self, file_type): def _get_geo_data(self, file_type): dim_0 = 2 dim_1 = 5 - data = {'Navigation/NOMSunAzimuth': self.make_test_data('NUL', 'NUL', 'GEO', + data = {"Navigation/NOMSunAzimuth": self.make_test_data("NUL", "NUL", "GEO", [dim_0, dim_1], file_type)} return data def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" global_attrs = { - '/attr/NOMSubSatLat': np.array(0.0), - '/attr/NOMSubSatLon': np.array(133.0), - '/attr/NOMSatHeight': np.array(3.5786E7), - '/attr/Semi_major_axis': np.array(6378.14), - '/attr/Semi_minor_axis': np.array(6353.28), - '/attr/OBIType': 'REGX', - '/attr/RegLength': np.array(2.0), - '/attr/RegWidth': np.array(5.0), - '/attr/Corner-Point Latitudes': np.array((4.1, 5.1, 4.1, 5.1)), - '/attr/Corner-Point Longitudes': np.array((141.1, 141.1, 141.1, 151.1)), - '/attr/Begin Line Number': np.array(0), - '/attr/End Line Number': np.array(1), - '/attr/Observing Beginning Date': '2019-06-03', '/attr/Observing Beginning Time': '00:30:01.807', - '/attr/Observing Ending Date': '2019-06-03', '/attr/Observing Ending Time': '00:34:07.572', - '/attr/Satellite Name': 'FY4B', '/attr/Sensor 
Identification Code': 'GHI', '/attr/Sensor Name': 'GHI', + "/attr/NOMSubSatLat": np.array(0.0), + "/attr/NOMSubSatLon": np.array(133.0), + "/attr/NOMSatHeight": np.array(3.5786E7), + "/attr/Semi_major_axis": np.array(6378.14), + "/attr/Semi_minor_axis": np.array(6353.28), + "/attr/OBIType": "REGX", + "/attr/RegLength": np.array(2.0), + "/attr/RegWidth": np.array(5.0), + "/attr/Corner-Point Latitudes": np.array((4.1, 5.1, 4.1, 5.1)), + "/attr/Corner-Point Longitudes": np.array((141.1, 141.1, 141.1, 151.1)), + "/attr/Begin Line Number": np.array(0), + "/attr/End Line Number": np.array(1), + "/attr/Observing Beginning Date": "2019-06-03", "/attr/Observing Beginning Time": "00:30:01.807", + "/attr/Observing Ending Date": "2019-06-03", "/attr/Observing Ending Time": "00:34:07.572", + "/attr/Satellite Name": "FY4B", "/attr/Sensor Identification Code": "GHI", "/attr/Sensor Name": "GHI", } data = {} - if self.filetype_info['file_type'] == 'ghi_l1_0250m': - data = self._get_250m_data('250') - elif self.filetype_info['file_type'] == 'ghi_l1_0500m': - data = self._get_500m_data('500') - elif self.filetype_info['file_type'] == 'ghi_l1_2000m': - data = self._get_2km_data('2000') - elif self.filetype_info['file_type'] == 'ghi_l1_2000m_geo': - data = self._get_geo_data('2000') + if self.filetype_info["file_type"] == "ghi_l1_0250m": + data = self._get_250m_data("250") + elif self.filetype_info["file_type"] == "ghi_l1_0500m": + data = self._get_500m_data("500") + elif self.filetype_info["file_type"] == "ghi_l1_2000m": + data = self._get_2km_data("2000") + elif self.filetype_info["file_type"] == "ghi_l1_2000m_geo": + data = self._get_geo_data("2000") test_content = {} test_content.update(global_attrs) @@ -199,7 +199,7 @@ def get_test_content(self, filename, filename_info, filetype_info): def _create_filenames_from_resolutions(*resolutions): """Create filenames from the given resolutions.""" - if 'GEO' in resolutions: + if "GEO" in resolutions: return ["FY4B-_GHI---_N_REGX_1330E_L1-_GEO-_MULT_NOM_20220613145300_20220613145359_2000M_V0001.HDF"] pattern = ("FY4B-_GHI---_N_REGX_1330E_L1-_FDI-_MULT_NOM_20220613145300_20220613145359_" "{resolution:04d}M_V0001.HDF") @@ -216,21 +216,21 @@ def setup_method(self): from satpy._config import config_search_paths from satpy.readers.fy4_base import FY4Base from satpy.readers.ghi_l1 import HDF_GHI_L1 - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.fy4 = mock.patch.object(FY4Base, '__bases__', (FakeHDF5FileHandler2,)) + self.fy4 = mock.patch.object(FY4Base, "__bases__", (FakeHDF5FileHandler2,)) self.p = mock.patch.object(HDF_GHI_L1.__class__, (self.fy4,)) self.fake_handler = self.fy4.start() self.p.is_local = True self.expected = { - 'C01': np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]), - 'C02': np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]), - 'C03': np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]), - 'C04': np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]), - 'C05': np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]), - 'C06': np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]), - 'C07': np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), + "C01": np.array([[2.01, 2.02, 
2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]), + "C02": np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]), + "C03": np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]), + "C04": np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]), + "C05": np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]), + "C06": np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]), + "C07": np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]), } def teardown_method(self): @@ -266,12 +266,12 @@ def test_ghi_orbital_parameters_are_correct(self): res = reader.load(band_names) # check whether the data type of orbital_parameters is float - orbital_parameters = res[band_names[0]].attrs['orbital_parameters'] + orbital_parameters = res[band_names[0]].attrs["orbital_parameters"] for attr in orbital_parameters: assert isinstance(orbital_parameters[attr], float) - assert orbital_parameters['satellite_nominal_latitude'] == 0. - assert orbital_parameters['satellite_nominal_longitude'] == 133.0 - assert orbital_parameters['satellite_nominal_altitude'] == 3.5786E7 + assert orbital_parameters["satellite_nominal_latitude"] == 0. + assert orbital_parameters["satellite_nominal_longitude"] == 133.0 + assert orbital_parameters["satellite_nominal_altitude"] == 3.5786E7 @staticmethod def _check_keys_for_dsq(available_datasets, resolution_to_test): @@ -282,7 +282,7 @@ def _check_keys_for_dsq(available_datasets, resolution_to_test): for band_name in band_names: ds_q = make_dsq(name=band_name, resolution=resolution_to_test) res = get_key(ds_q, available_datasets, num_results=0, best=False) - if band_name < 'C07': + if band_name < "C07": assert len(res) == 2 else: assert len(res) == 3 @@ -295,21 +295,21 @@ def test_ghi_counts_calibration(self): ds_ids = [] band_names = CHANNELS_BY_RESOLUTION[2000] for band_name in band_names: - ds_ids.append(make_dsq(name=band_name, calibration='counts')) + ds_ids.append(make_dsq(name=band_name, calibration="counts")) res = reader.load(ds_ids) assert len(res) == 7 for band_name in band_names: assert res[band_name].shape == (2, 5) - assert res[band_name].attrs['calibration'] == "counts" + assert res[band_name].attrs["calibration"] == "counts" assert res[band_name].dtype == np.uint16 - assert res[band_name].attrs['units'] == "1" + assert res[band_name].attrs["units"] == "1" def test_ghi_geo(self): """Test loading data for angles.""" from satpy.tests.utils import make_dsq - reader = self._create_reader_for_resolutions('GEO') - band_name = 'solar_azimuth_angle' + reader = self._create_reader_for_resolutions("GEO") + band_name = "solar_azimuth_angle" ds_ids = [make_dsq(name=band_name)] res = reader.load(ds_ids) assert len(res) == 1 @@ -339,26 +339,26 @@ def test_ghi_for_one_resolution(self, resolution_to_test): assert len(res) == len(band_names) self._check_calibration_and_units(band_names, res) for band_name in band_names: - np.testing.assert_allclose(np.array(res[band_name].attrs['area'].area_extent), + np.testing.assert_allclose(np.array(res[band_name].attrs["area"].area_extent), np.array(AREA_EXTENTS_BY_RESOLUTION[resolution_to_test])) def _check_calibration_and_units(self, band_names, result): for band_name in band_names: - assert result[band_name].attrs['sensor'].islower() + assert result[band_name].attrs["sensor"].islower() assert result[band_name].shape == (2, 5) np.testing.assert_allclose(result[band_name].values, self.expected[band_name], equal_nan=True) 
self._check_units(band_name, result) @staticmethod def _check_units(band_name, result): - if band_name <= 'C06': - assert result[band_name].attrs['calibration'] == "reflectance" + if band_name <= "C06": + assert result[band_name].attrs["calibration"] == "reflectance" else: - assert result[band_name].attrs['calibration'] == 'brightness_temperature' - if band_name <= 'C06': - assert result[band_name].attrs['units'] == "%" + assert result[band_name].attrs["calibration"] == "brightness_temperature" + if band_name <= "C06": + assert result[band_name].attrs["units"] == "%" else: - assert result[band_name].attrs['units'] == "K" + assert result[band_name].attrs["units"] == "K" @staticmethod def _assert_which_channels_are_loaded(available_datasets, band_names, resolution_to_test): @@ -375,7 +375,7 @@ def _assert_which_channels_are_loaded(available_datasets, band_names, resolution ds_q = make_dsq(name=band_name, resolution=resolution_to_test) res = get_key(ds_q, available_datasets, num_results=0, best=False) - if band_name < 'C07': + if band_name < "C07": assert len(res) == 2 else: assert len(res) == 3 diff --git a/satpy/tests/reader_tests/test_ghrsst_l2.py b/satpy/tests/reader_tests/test_ghrsst_l2.py index e33cec467a..66c030e91d 100644 --- a/satpy/tests/reader_tests/test_ghrsst_l2.py +++ b/satpy/tests/reader_tests/test_ghrsst_l2.py @@ -39,35 +39,35 @@ def setup_method(self, tmp_path): self.lat_data = np.array(([43.43, 55.56, 61.25], [41.38, 50.28, 60.80])) self.lon = xr.DataArray( self.lon_data, - dims=('nj', 'ni'), - attrs={'standard_name': 'longitude', - 'units': 'degrees_east', + dims=("nj", "ni"), + attrs={"standard_name": "longitude", + "units": "degrees_east", } ) self.lat = xr.DataArray( self.lat_data, - dims=('nj', 'ni'), - attrs={'standard_name': 'latitude', - 'units': 'degrees_north', + dims=("nj", "ni"), + attrs={"standard_name": "latitude", + "units": "degrees_north", } ) self.sst = xr.DataArray( self.base_data, - dims=('nj', 'ni'), - attrs={'scale_factor': 0.01, 'add_offset': 273.15, - '_FillValue': -32768, 'units': 'kelvin', + dims=("nj", "ni"), + attrs={"scale_factor": 0.01, "add_offset": 273.15, + "_FillValue": -32768, "units": "kelvin", } ) self.fake_dataset = xr.Dataset( data_vars={ - 'sea_surface_temperature': self.sst, - 'longitude': self.lon, - 'latitude': self.lat, + "sea_surface_temperature": self.sst, + "longitude": self.lon, + "latitude": self.lat, }, attrs={ "start_time": "20220321T112640Z", "stop_time": "20220321T145711Z", - "platform": 'NOAA20', + "platform": "NOAA20", "sensor": "VIIRS", }, ) @@ -81,12 +81,12 @@ def _create_tarfile_with_testdata(self, mypath): slstrdir.mkdir(parents=True, exist_ok=True) tarfile_path = mypath / tarfile_fakename - ncfilename = slstrdir / 'L2P_GHRSST-SSTskin-202204131200.nc' + ncfilename = slstrdir / "L2P_GHRSST-SSTskin-202204131200.nc" self.fake_dataset.to_netcdf(os.fspath(ncfilename)) - xmlfile_path = slstrdir / 'xfdumanifest.xml' + xmlfile_path = slstrdir / "xfdumanifest.xml" xmlfile_path.touch() - with tarfile.open(name=tarfile_path, mode='w') as tar: + with tarfile.open(name=tarfile_path, mode="w") as tar: tar.add(os.fspath(ncfilename), arcname=Path(slstr_fakename) / ncfilename.name) tar.add(os.fspath(xmlfile_path), arcname=Path(slstr_fakename) / xmlfile_path.name) @@ -95,7 +95,7 @@ def _create_tarfile_with_testdata(self, mypath): def test_instantiate_single_netcdf_file(self, tmp_path): """Test initialization of file handlers - given a single netCDF file.""" filename_info = {} - tmp_filepath = tmp_path / 'fake_dataset.nc' + 
tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) @@ -110,29 +110,29 @@ def test_instantiate_tarfile(self, tmp_path): def test_get_dataset(self, tmp_path): """Test retrieval of datasets.""" filename_info = {} - tmp_filepath = tmp_path / 'fake_dataset.nc' + tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) - test.get_dataset('longitude', {'standard_name': 'longitude'}) - test.get_dataset('latitude', {'standard_name': 'latitude'}) - test.get_dataset('sea_surface_temperature', {'standard_name': 'sea_surface_temperature'}) + test.get_dataset("longitude", {"standard_name": "longitude"}) + test.get_dataset("latitude", {"standard_name": "latitude"}) + test.get_dataset("sea_surface_temperature", {"standard_name": "sea_surface_temperature"}) with pytest.raises(KeyError): - test.get_dataset('erroneous dataset', {'standard_name': 'erroneous dataset'}) + test.get_dataset("erroneous dataset", {"standard_name": "erroneous dataset"}) def test_get_sensor(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" dt_valid = datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z - filename_info = {'field_type': 'NARSST', 'generating_centre': 'FRA_', - 'satid': 'NOAA20_', 'valid_time': dt_valid} + filename_info = {"field_type": "NARSST", "generating_centre": "FRA_", + "satid": "NOAA20_", "valid_time": dt_valid} - tmp_filepath = tmp_path / 'fake_dataset.nc' + tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) - assert test.sensor == 'viirs' + assert test.sensor == "viirs" def test_get_start_and_end_times(self, tmp_path): """Test retrieval of the sensor name from the netCDF file.""" @@ -140,10 +140,10 @@ def test_get_start_and_end_times(self, tmp_path): good_start_time = datetime(2022, 3, 21, 11, 26, 40) # 20220321T112640Z good_stop_time = datetime(2022, 3, 21, 14, 57, 11) # 20220321T145711Z - filename_info = {'field_type': 'NARSST', 'generating_centre': 'FRA_', - 'satid': 'NOAA20_', 'valid_time': dt_valid} + filename_info = {"field_type": "NARSST", "generating_centre": "FRA_", + "satid": "NOAA20_", "valid_time": dt_valid} - tmp_filepath = tmp_path / 'fake_dataset.nc' + tmp_filepath = tmp_path / "fake_dataset.nc" self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) diff --git a/satpy/tests/reader_tests/test_glm_l2.py b/satpy/tests/reader_tests/test_glm_l2.py index 57d324f0b1..81636ba630 100644 --- a/satpy/tests/reader_tests/test_glm_l2.py +++ b/satpy/tests/reader_tests/test_glm_l2.py @@ -33,72 +33,72 @@ def setup_fake_dataset(): fed = fed.astype(np.int16) fed = xr.DataArray( fed, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'scale_factor': 0.5, - 'add_offset': -1., - '_FillValue': 0, - 'units': 'Count per nominal 3136 microradian^2 pixel per 1.0 min', - 'grid_mapping': 'goes_imager_projection', - 'standard_name': 'flash_extent_density', - 'long_name': 'Flash extent density', + "scale_factor": 0.5, + "add_offset": -1., + "_FillValue": 0, + "units": "Count per nominal 3136 microradian^2 pixel per 1.0 min", + "grid_mapping": "goes_imager_projection", + "standard_name": "flash_extent_density", + "long_name": "Flash extent density", } ) dqf = xr.DataArray( 
fed.data.copy().astype(np.uint8), - dims=('y', 'x'), + dims=("y", "x"), attrs={ - '_FillValue': -1, - 'units': '1', - 'grid_mapping': 'goes_imager_projection', - 'standard_name': 'status_flag', - 'long_name': 'GLM data quality flags', - 'flag_meanings': "valid invalid", + "_FillValue": -1, + "units": "1", + "grid_mapping": "goes_imager_projection", + "standard_name": "status_flag", + "long_name": "GLM data quality flags", + "flag_meanings": "valid invalid", } ) # create a variable that won't be configured to test available_datasets not_configured = xr.DataArray( fed.data.copy(), - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'scale_factor': 0.5, - 'add_offset': -1., - '_FillValue': 0, - 'units': '1', - 'grid_mapping': 'goes_imager_projection', - 'standard_name': 'test', - 'long_name': 'Test', + "scale_factor": 0.5, + "add_offset": -1., + "_FillValue": 0, + "units": "1", + "grid_mapping": "goes_imager_projection", + "standard_name": "test", + "long_name": "Test", } ) x__ = xr.DataArray( range(5), - attrs={'scale_factor': 2., 'add_offset': -1.}, - dims=('x',), + attrs={"scale_factor": 2., "add_offset": -1.}, + dims=("x",), ) y__ = xr.DataArray( range(2), - attrs={'scale_factor': -2., 'add_offset': 1.}, - dims=('y',), + attrs={"scale_factor": -2., "add_offset": 1.}, + dims=("y",), ) proj = xr.DataArray( [], attrs={ - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'perspective_point_height': 1., - 'longitude_of_projection_origin': -90., - 'latitude_of_projection_origin': 0., - 'sweep_angle_axis': u'x' + "semi_major_axis": 1., + "semi_minor_axis": 1., + "perspective_point_height": 1., + "longitude_of_projection_origin": -90., + "latitude_of_projection_origin": 0., + "sweep_angle_axis": u"x" } ) fake_dataset = xr.Dataset( data_vars={ - 'flash_extent_density': fed, - 'not_configured': not_configured, - 'DQF': dqf, - 'x': x__, - 'y': y__, - 'goes_imager_projection': proj, + "flash_extent_density": fed, + "not_configured": not_configured, + "DQF": dqf, + "x": x__, + "y": y__, + "goes_imager_projection": proj, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02) @@ -115,89 +115,87 @@ def setup_fake_dataset(): class TestGLML2FileHandler(unittest.TestCase): """Tests for the GLM L2 reader.""" - @mock.patch('satpy.readers.abi_base.xr') + @mock.patch("satpy.readers.abi_base.xr") def setUp(self, xr_): """Create a fake file handler to test.""" from satpy.readers.glm_l2 import NCGriddedGLML2 fake_dataset = setup_fake_dataset() xr_.open_dataset.return_value = fake_dataset - self.reader = NCGriddedGLML2('filename', - {'platform_shortname': 'G16', - 'scene_abbr': 'C', 'scan_mode': 'M3'}, - {'filetype': 'glm_l2_imagery'}) + self.reader = NCGriddedGLML2("filename", + {"platform_shortname": "G16", + "scene_abbr": "C", "scan_mode": "M3"}, + {"filetype": "glm_l2_imagery"}) def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime - self.assertEqual(self.reader.start_time, - datetime(2017, 9, 20, 17, 30, 40)) - self.assertEqual(self.reader.end_time, - datetime(2017, 9, 20, 17, 41, 17)) + assert self.reader.start_time == datetime(2017, 9, 20, 17, 30, 40) + assert self.reader.end_time == datetime(2017, 9, 20, 17, 41, 17) def test_get_dataset(self): """Test the get_dataset method.""" from satpy.tests.utils import make_dataid - key = make_dataid(name='flash_extent_density') - res = self.reader.get_dataset(key, {'info': 'info'}) - exp = {'instrument_ID': None, - 'modifiers': (), 
- 'name': 'flash_extent_density', - 'orbital_parameters': {'projection_altitude': 1.0, - 'projection_latitude': 0.0, - 'projection_longitude': -90.0, + key = make_dataid(name="flash_extent_density") + res = self.reader.get_dataset(key, {"info": "info"}) + exp = {"instrument_ID": None, + "modifiers": (), + "name": "flash_extent_density", + "orbital_parameters": {"projection_altitude": 1.0, + "projection_latitude": 0.0, + "projection_longitude": -90.0, # 'satellite_nominal_altitude': 35786.02, - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_longitude': -89.5}, - 'orbital_slot': None, - 'platform_name': 'GOES-16', - 'platform_shortname': 'G16', - 'production_site': None, - 'scan_mode': 'M3', - 'scene_abbr': 'C', - 'scene_id': None, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_longitude": -89.5}, + "orbital_slot": None, + "platform_name": "GOES-16", + "platform_shortname": "G16", + "production_site": None, + "scan_mode": "M3", + "scene_abbr": "C", + "scene_id": None, "spatial_resolution": "2km at nadir", - 'sensor': 'glm', - 'timeline_ID': None, - 'grid_mapping': 'goes_imager_projection', - 'standard_name': 'flash_extent_density', - 'long_name': 'Flash extent density', - 'units': 'Count per nominal 3136 microradian^2 pixel per 1.0 min'} + "sensor": "glm", + "timeline_ID": None, + "grid_mapping": "goes_imager_projection", + "standard_name": "flash_extent_density", + "long_name": "Flash extent density", + "units": "Count per nominal 3136 microradian^2 pixel per 1.0 min"} - self.assertDictEqual(res.attrs, exp) + assert res.attrs == exp def test_get_dataset_dqf(self): """Test the get_dataset method with special DQF var.""" from satpy.tests.utils import make_dataid - key = make_dataid(name='DQF') - res = self.reader.get_dataset(key, {'info': 'info'}) - exp = {'instrument_ID': None, - 'modifiers': (), - 'name': 'DQF', - 'orbital_parameters': {'projection_altitude': 1.0, - 'projection_latitude': 0.0, - 'projection_longitude': -90.0, + key = make_dataid(name="DQF") + res = self.reader.get_dataset(key, {"info": "info"}) + exp = {"instrument_ID": None, + "modifiers": (), + "name": "DQF", + "orbital_parameters": {"projection_altitude": 1.0, + "projection_latitude": 0.0, + "projection_longitude": -90.0, # 'satellite_nominal_altitude': 35786.02, - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_longitude': -89.5}, - 'orbital_slot': None, - 'platform_name': 'GOES-16', - 'platform_shortname': 'G16', - 'production_site': None, - 'scan_mode': 'M3', - 'scene_abbr': 'C', - 'scene_id': None, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_longitude": -89.5}, + "orbital_slot": None, + "platform_name": "GOES-16", + "platform_shortname": "G16", + "production_site": None, + "scan_mode": "M3", + "scene_abbr": "C", + "scene_id": None, "spatial_resolution": "2km at nadir", - 'sensor': 'glm', - 'timeline_ID': None, - 'grid_mapping': 'goes_imager_projection', - 'units': '1', - '_FillValue': -1, - 'standard_name': 'status_flag', - 'long_name': 'GLM data quality flags', - 'flag_meanings': "valid invalid"} + "sensor": "glm", + "timeline_ID": None, + "grid_mapping": "goes_imager_projection", + "units": "1", + "_FillValue": -1, + "standard_name": "status_flag", + "long_name": "GLM data quality flags", + "flag_meanings": "valid invalid"} - self.assertDictEqual(res.attrs, exp) - self.assertTrue(np.issubdtype(res.dtype, np.integer)) + assert res.attrs == exp + assert np.issubdtype(res.dtype, np.integer) class TestGLML2Reader(unittest.TestCase): @@ -205,32 +203,32 @@ class 
TestGLML2Reader(unittest.TestCase): yaml_file = "glm_l2.yaml" - @mock.patch('satpy.readers.abi_base.xr') + @mock.patch("satpy.readers.abi_base.xr") def setUp(self, xr_): """Create a fake reader to test.""" from satpy._config import config_search_paths from satpy.readers import load_reader - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) fake_dataset = setup_fake_dataset() xr_.open_dataset.return_value = fake_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'OR_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc', - 'CSPP_CG_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc', + "OR_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc", + "CSPP_CG_GLM-L2-GLMC-M3_G16_s20192862159000_e20192862200000_c20192862200350.nc", ]) - self.assertEqual(len(loadables), 2) + assert len(loadables) == 2 r.create_filehandlers(loadables) self.reader = r def test_available_datasets(self): """Test that resolution is added to YAML configured variables.""" # make sure we have some files - self.assertTrue(self.reader.file_handlers) + assert self.reader.file_handlers available_datasets = list(self.reader.available_dataset_ids) # flash_extent_density, DQF, and not_configured are available in our tests - self.assertEqual(len(available_datasets), 3) + assert len(available_datasets) == 3 for ds_id in available_datasets: - self.assertEqual(ds_id['resolution'], 2000) + assert ds_id["resolution"] == 2000 # make sure not_configured was discovered - names = [dataid['name'] for dataid in available_datasets] - assert 'not_configured' in names + names = [dataid["name"] for dataid in available_datasets] + assert "not_configured" in names diff --git a/satpy/tests/reader_tests/test_goes_imager_hrit.py b/satpy/tests/reader_tests/test_goes_imager_hrit.py index 02b9632335..cafe7c1e2c 100644 --- a/satpy/tests/reader_tests/test_goes_imager_hrit.py +++ b/satpy/tests/reader_tests/test_goes_imager_hrit.py @@ -51,8 +51,8 @@ def test_fun(self): (100.1640625, b"\x42\x64\x2a\x00")] for expected, str_val in test_data: - val = np.frombuffer(str_val, dtype='>i4') - self.assertEqual(expected, make_gvar_float(val)) + val = np.frombuffer(str_val, dtype=">i4") + assert expected == make_gvar_float(val) class TestMakeSGSTime(unittest.TestCase): @@ -63,98 +63,98 @@ def test_fun(self): # 2018-129 (may 9th), 21:33:27.999 tcds = np.array([(32, 24, 18, 146, 19, 50, 121, 153)], dtype=sgs_time) expected = datetime.datetime(2018, 5, 9, 21, 33, 27, 999000) - self.assertEqual(make_sgs_time(tcds[0]), expected) - - -test_pro = {'TISTR': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TCurr': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TCLMT': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'SubSatLongitude': 100.1640625, - 'TCHED': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TLTRL': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TIPFS': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TISPC': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'ReferenceLatitude': 0.0, - 'TIIRT': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TLHED': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TIVIT': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'SubSatLatitude': 0.0, - 'TIECL': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'ReferenceLongitude': 100.1640625, - 'TCTRL': 
datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TLRAN': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TINFS': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TIBBC': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'TIONA': datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), - 'ReferenceDistance': 100.1640625, - 'SatelliteID': 15} + assert make_sgs_time(tcds[0]) == expected + + +test_pro = {"TISTR": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TCurr": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TCLMT": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "SubSatLongitude": 100.1640625, + "TCHED": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TLTRL": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TIPFS": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TISPC": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "ReferenceLatitude": 0.0, + "TIIRT": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TLHED": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TIVIT": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "SubSatLatitude": 0.0, + "TIECL": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "ReferenceLongitude": 100.1640625, + "TCTRL": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TLRAN": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TINFS": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TIBBC": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "TIONA": datetime.datetime(2018, 5, 9, 21, 33, 27, 999000), + "ReferenceDistance": 100.1640625, + "SatelliteID": 15} class TestHRITGOESPrologueFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" - @mock.patch('satpy.readers.goes_imager_hrit.recarray2dict') - @mock.patch('satpy.readers.goes_imager_hrit.np.fromfile') - @mock.patch('satpy.readers.goes_imager_hrit.HRITFileHandler.__init__') + @mock.patch("satpy.readers.goes_imager_hrit.recarray2dict") + @mock.patch("satpy.readers.goes_imager_hrit.np.fromfile") + @mock.patch("satpy.readers.goes_imager_hrit.HRITFileHandler.__init__") def test_init(self, new_fh_init, fromfile, recarray2dict): """Setup the hrit file handler for testing.""" recarray2dict.side_effect = lambda x: x[0] - new_fh_init.return_value.filename = 'filename' - HRITGOESPrologueFileHandler.filename = 'filename' - HRITGOESPrologueFileHandler.mda = {'total_header_length': 1} + new_fh_init.return_value.filename = "filename" + HRITGOESPrologueFileHandler.filename = "filename" + HRITGOESPrologueFileHandler.mda = {"total_header_length": 1} ret = {} the_time = np.array([(32, 24, 18, 146, 19, 50, 121, 153)], dtype=sgs_time)[0] - for key in ['TCurr', 'TCHED', 'TCTRL', 'TLHED', 'TLTRL', 'TIPFS', - 'TINFS', 'TISPC', 'TIECL', 'TIBBC', 'TISTR', 'TLRAN', - 'TIIRT', 'TIVIT', 'TCLMT', 'TIONA']: + for key in ["TCurr", "TCHED", "TCTRL", "TLHED", "TLTRL", "TIPFS", + "TINFS", "TISPC", "TIECL", "TIBBC", "TISTR", "TLRAN", + "TIIRT", "TIVIT", "TCLMT", "TIONA"]: ret[key] = the_time - ret['SubSatLatitude'] = np.frombuffer(b"\x00\x00\x00\x00", dtype='>i4')[0] - ret['ReferenceLatitude'] = np.frombuffer(b"\x00\x00\x00\x00", dtype='>i4')[0] - ret['SubSatLongitude'] = np.frombuffer(b"\x42\x64\x2a\x00", dtype='>i4')[0] - ret['ReferenceLongitude'] = np.frombuffer(b"\x42\x64\x2a\x00", dtype='>i4')[0] - ret['ReferenceDistance'] = np.frombuffer(b"\x42\x64\x2a\x00", dtype='>i4')[0] - ret['SatelliteID'] = 15 + ret["SubSatLatitude"] = np.frombuffer(b"\x00\x00\x00\x00", dtype=">i4")[0] + ret["ReferenceLatitude"] = np.frombuffer(b"\x00\x00\x00\x00", 
dtype=">i4")[0] + ret["SubSatLongitude"] = np.frombuffer(b"\x42\x64\x2a\x00", dtype=">i4")[0] + ret["ReferenceLongitude"] = np.frombuffer(b"\x42\x64\x2a\x00", dtype=">i4")[0] + ret["ReferenceDistance"] = np.frombuffer(b"\x42\x64\x2a\x00", dtype=">i4")[0] + ret["SatelliteID"] = 15 fromfile.return_value = [ret] m = mock.mock_open() - with mock.patch('satpy.readers.goes_imager_hrit.open', m, create=True) as newopen: + with mock.patch("satpy.readers.goes_imager_hrit.open", m, create=True) as newopen: newopen.return_value.__enter__.return_value.seek.return_value = 1 self.reader = HRITGOESPrologueFileHandler( - 'filename', {'platform_shortname': 'GOES15', - 'start_time': datetime.datetime(2016, 3, 3, 0, 0), - 'service': 'test_service'}, - {'filetype': 'info'}) + "filename", {"platform_shortname": "GOES15", + "start_time": datetime.datetime(2016, 3, 3, 0, 0), + "service": "test_service"}, + {"filetype": "info"}) - self.assertEqual(test_pro, self.reader.prologue) + assert test_pro == self.reader.prologue class TestHRITGOESFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" - @mock.patch('satpy.readers.goes_imager_hrit.HRITFileHandler.__init__') + @mock.patch("satpy.readers.goes_imager_hrit.HRITFileHandler.__init__") def setUp(self, new_fh_init): """Set up the hrit file handler for testing.""" - blob = '$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n'.encode() - mda = {'projection_parameters': {'SSP_longitude': -123.0}, - 'spectral_channel_id': 1, - 'image_data_function': blob} - HRITGOESFileHandler.filename = 'filename' + blob = "$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n".encode() + mda = {"projection_parameters": {"SSP_longitude": -123.0}, + "spectral_channel_id": 1, + "image_data_function": blob} + HRITGOESFileHandler.filename = "filename" HRITGOESFileHandler.mda = mda self.prologue = mock.MagicMock() self.prologue.prologue = test_pro - self.reader = HRITGOESFileHandler('filename', {}, {}, self.prologue) + self.reader = HRITGOESFileHandler("filename", {}, {}, self.prologue) def test_init(self): """Test the init.""" - blob = '$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n'.encode() - mda = {'spectral_channel_id': 1, - 'projection_parameters': {'SSP_longitude': 100.1640625}, - 'image_data_function': blob} - self.assertEqual(self.reader.mda, mda) + blob = "$HALFTONE:=10\r\n_NAME:=albedo\r\n_UNIT:=percent\r\n0:=0.0\r\n1023:=100.0\r\n".encode() + mda = {"spectral_channel_id": 1, + "projection_parameters": {"SSP_longitude": 100.1640625}, + "image_data_function": blob} + assert self.reader.mda == mda - @mock.patch('satpy.readers.goes_imager_hrit.HRITFileHandler.get_dataset') + @mock.patch("satpy.readers.goes_imager_hrit.HRITFileHandler.get_dataset") def test_get_dataset(self, base_get_dataset): """Test get_dataset.""" - key = make_dataid(name="CH1", calibration='reflectance') + key = make_dataid(name="CH1", calibration="reflectance") base_get_dataset.return_value = DataArray(np.arange(25).reshape(5, 5)) res = self.reader.get_dataset(key, {}) expected = np.array([[np.nan, 0.097752, 0.195503, 0.293255, 0.391007], @@ -163,37 +163,37 @@ def test_get_dataset(self, base_get_dataset): [1.466276, 1.564027, 1.661779, 1.759531, 1.857283], [1.955034, 2.052786, 2.150538, 2.248289, 2.346041]]) - self.assertTrue(np.allclose(res.values, expected, equal_nan=True)) - self.assertEqual(res.attrs['units'], '%') - self.assertDictEqual(res.attrs['orbital_parameters'], - {'projection_longitude': 
self.reader.mda['projection_parameters']['SSP_longitude'], - 'projection_latitude': 0.0, - 'projection_altitude': ALTITUDE}) + assert np.allclose(res.values, expected, equal_nan=True) + assert res.attrs["units"] == "%" + ssp_longitude = self.reader.mda["projection_parameters"]["SSP_longitude"] + assert res.attrs["orbital_parameters"] == {"projection_longitude": ssp_longitude, + "projection_latitude": 0.0, + "projection_altitude": ALTITUDE} def test_get_area_def(self): """Test getting the area definition.""" self.reader.mda.update({ - 'cfac': 10216334, - 'lfac': 10216334, - 'coff': 1408.0, - 'loff': 944.0, - 'number_of_lines': 464, - 'number_of_columns': 2816 + "cfac": 10216334, + "lfac": 10216334, + "coff": 1408.0, + "loff": 944.0, + "number_of_lines": 464, + "number_of_columns": 2816 }) - dsid = make_dataid(name="CH1", calibration='reflectance', + dsid = make_dataid(name="CH1", calibration="reflectance", resolution=3000) area = self.reader.get_area_def(dsid) a, b = proj4_radius_parameters(area.proj_dict) assert a == EQUATOR_RADIUS assert b == POLE_RADIUS - assert area.proj_dict['h'] == ALTITUDE - assert area.proj_dict['lon_0'] == 100.1640625 - assert area.proj_dict['proj'] == 'geos' - assert area.proj_dict['units'] == 'm' + assert area.proj_dict["h"] == ALTITUDE + assert area.proj_dict["lon_0"] == 100.1640625 + assert area.proj_dict["proj"] == "geos" + assert area.proj_dict["units"] == "m" assert area.width == 2816 assert area.height == 464 - assert area.area_id == 'goes-15_goes_imager_fd_3km' + assert area.area_id == "goes-15_goes_imager_fd_3km" area_extent_exp = (-5639254.900260435, 1925159.4881528523, 5643261.475678028, 3784210.48191544) np.testing.assert_allclose(area.area_extent, area_extent_exp) diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_eum.py b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py index d72271f623..68472aeb1a 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc_eum.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py @@ -30,12 +30,12 @@ class GOESNCEUMFileHandlerRadianceTest(unittest.TestCase): longMessage = True - @mock.patch('satpy.readers.goes_imager_nc.xr') + @mock.patch("satpy.readers.goes_imager_nc.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESEUMNCFileHandler - self.coefs = CALIB_COEFS['GOES-15'] + self.coefs = CALIB_COEFS["GOES-15"] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels @@ -51,19 +51,19 @@ def setUp(self, xr_): nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( - {'data': xr.DataArray(data=self.radiance, dims=('time', 'yc', 'xc')), - 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), - dims=('time',)), - 'bands': xr.DataArray(data=np.array([1]))}, - attrs={'Satellite Sensor': 'G-15'}) + {"data": xr.DataArray(data=self.radiance, dims=("time", "yc", "xc")), + "time": xr.DataArray(data=np.array([0], dtype="datetime64[ms]"), + dims=("time",)), + "bands": xr.DataArray(data=np.array([1]))}, + attrs={"Satellite Sensor": "G-15"}) geo_data = xr.Dataset( - {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), - 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))}, - attrs={'Satellite Sensor': 'G-15'}) + {"lon": xr.DataArray(data=self.lon, dims=("yc", "xc")), + "lat": xr.DataArray(data=self.lat, dims=("yc", "xc"))}, + attrs={"Satellite Sensor": "G-15"}) # Instantiate reader using the mocked open_dataset() method - self.reader = 
GOESEUMNCFileHandler(filename='dummy', filename_info={}, + self.reader = GOESEUMNCFileHandler(filename="dummy", filename_info={}, filetype_info={}, geo_data=geo_data) def test_get_dataset_radiance(self): @@ -71,20 +71,19 @@ def test_get_dataset_radiance(self): for ch in self.channels: if not is_vis_channel(ch): radiance = self.reader.get_dataset( - key=make_dataid(name=ch, calibration='radiance'), info={}) + key=make_dataid(name=ch, calibration="radiance"), info={}) # ... this only compares the valid (unmasked) elements - self.assertTrue(np.all(self.radiance == radiance.to_masked_array()), - msg='get_dataset() returns invalid radiance for ' - 'channel {}'.format(ch)) + assert np.all(self.radiance == radiance.to_masked_array()), \ + f"get_dataset() returns invalid radiance for channel {ch}" def test_calibrate(self): """Test whether the correct calibration methods are called.""" for ch in self.channels: if not is_vis_channel(ch): - calibs = {'brightness_temperature': '_calibrate_ir'} + calibs = {"brightness_temperature": "_calibrate_ir"} for calib, method in calibs.items(): with mock.patch.object(self.reader, method) as target_func: - self.reader.calibrate(data=self.reader.nc['data'], + self.reader.calibrate(data=self.reader.nc["data"], calibration=calib, channel=ch) target_func.assert_called() @@ -107,11 +106,10 @@ def test_get_sector(self): (123, 456): UNKNOWN_SECTOR } for (nlines, ncols), sector_ref in shapes.items(): - for channel in ('00_7', '10_7'): + for channel in ("00_7", "10_7"): sector = self.reader._get_sector(channel=channel, nlines=nlines, ncols=ncols) - self.assertEqual(sector, sector_ref, - msg='Incorrect sector identification') + assert sector == sector_ref, "Incorrect sector identification" class GOESNCEUMFileHandlerReflectanceTest(unittest.TestCase): @@ -119,12 +117,12 @@ class GOESNCEUMFileHandlerReflectanceTest(unittest.TestCase): longMessage = True - @mock.patch('satpy.readers.goes_imager_nc.xr') + @mock.patch("satpy.readers.goes_imager_nc.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESEUMNCFileHandler - self.coefs = CALIB_COEFS['GOES-15'] + self.coefs = CALIB_COEFS["GOES-15"] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels @@ -140,19 +138,19 @@ def setUp(self, xr_): nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( - {'data': xr.DataArray(data=self.reflectance, dims=('time', 'yc', 'xc')), - 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), - dims=('time',)), - 'bands': xr.DataArray(data=np.array([1]))}, - attrs={'Satellite Sensor': 'G-15'}) + {"data": xr.DataArray(data=self.reflectance, dims=("time", "yc", "xc")), + "time": xr.DataArray(data=np.array([0], dtype="datetime64[ms]"), + dims=("time",)), + "bands": xr.DataArray(data=np.array([1]))}, + attrs={"Satellite Sensor": "G-15"}) geo_data = xr.Dataset( - {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), - 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))}, - attrs={'Satellite Sensor': 'G-15'}) + {"lon": xr.DataArray(data=self.lon, dims=("yc", "xc")), + "lat": xr.DataArray(data=self.lat, dims=("yc", "xc"))}, + attrs={"Satellite Sensor": "G-15"}) # Instantiate reader using the mocked open_dataset() method - self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={}, + self.reader = GOESEUMNCFileHandler(filename="dummy", filename_info={}, filetype_info={}, geo_data=geo_data) def 
test_get_dataset_reflectance(self): @@ -160,8 +158,7 @@ def test_get_dataset_reflectance(self): for ch in self.channels: if is_vis_channel(ch): refl = self.reader.get_dataset( - key=make_dataid(name=ch, calibration='reflectance'), info={}) + key=make_dataid(name=ch, calibration="reflectance"), info={}) # ... this only compares the valid (unmasked) elements - self.assertTrue(np.all(self.reflectance == refl.to_masked_array()), - msg='get_dataset() returns invalid reflectance for ' - 'channel {}'.format(ch)) + assert np.all(self.reflectance == refl.to_masked_array()), \ + f"get_dataset() returns invalid reflectance for channel {ch}" diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py index 6369568d1f..59236290b8 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py @@ -39,14 +39,14 @@ class GOESNCBaseFileHandlerTest(unittest.TestCase): longMessage = True - @mock.patch('satpy.readers.goes_imager_nc.xr') - @mock.patch.multiple('satpy.readers.goes_imager_nc.GOESNCBaseFileHandler', + @mock.patch("satpy.readers.goes_imager_nc.xr") + @mock.patch.multiple("satpy.readers.goes_imager_nc.GOESNCBaseFileHandler", _get_sector=mock.MagicMock()) def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESNCBaseFileHandler - self.coefs = CALIB_COEFS['GOES-15'] + self.coefs = CALIB_COEFS["GOES-15"] # Mock file access to return a fake dataset. self.time = datetime.datetime(2018, 8, 16, 16, 7) @@ -54,29 +54,29 @@ def setUp(self, xr_): self.dummy2d = np.zeros((2, 2)) self.band = 1 self.nc = xr.Dataset( - {'data': xr.DataArray(self.dummy3d, dims=('time', 'yc', 'xc')), - 'lon': xr.DataArray(data=self.dummy2d, dims=('yc', 'xc')), - 'lat': xr.DataArray(data=self.dummy2d, dims=('yc', 'xc')), - 'time': xr.DataArray(data=np.array([self.time], - dtype='datetime64[ms]'), - dims=('time',)), - 'bands': xr.DataArray(data=np.array([self.band]))}, - attrs={'Satellite Sensor': 'G-15'}) + {"data": xr.DataArray(self.dummy3d, dims=("time", "yc", "xc")), + "lon": xr.DataArray(data=self.dummy2d, dims=("yc", "xc")), + "lat": xr.DataArray(data=self.dummy2d, dims=("yc", "xc")), + "time": xr.DataArray(data=np.array([self.time], + dtype="datetime64[ms]"), + dims=("time",)), + "bands": xr.DataArray(data=np.array([self.band]))}, + attrs={"Satellite Sensor": "G-15"}) xr_.open_dataset.return_value = self.nc # Instantiate reader using the mocked open_dataset() method. Also, make # the reader believe all abstract methods have been implemented. 
- self.reader = GOESNCBaseFileHandler(filename='dummy', filename_info={}, + self.reader = GOESNCBaseFileHandler(filename="dummy", filename_info={}, filetype_info={}) def test_init(self): """Tests reader initialization.""" - self.assertEqual(self.reader.nlines, self.dummy2d.shape[0]) - self.assertEqual(self.reader.ncols, self.dummy2d.shape[1]) - self.assertEqual(self.reader.platform_name, 'GOES-15') - self.assertEqual(self.reader.platform_shortname, 'goes15') - self.assertEqual(self.reader.gvar_channel, self.band) - self.assertIsInstance(self.reader.geo_data, xr.Dataset) + assert self.reader.nlines == self.dummy2d.shape[0] + assert self.reader.ncols == self.dummy2d.shape[1] + assert self.reader.platform_name == "GOES-15" + assert self.reader.platform_shortname == "goes15" + assert self.reader.gvar_channel == self.band + assert isinstance(self.reader.geo_data, xr.Dataset) def test_get_nadir_pixel(self): """Test identification of the nadir pixel.""" @@ -89,23 +89,20 @@ def test_get_nadir_pixel(self): [0, 0, 0, 0]]) nadir_row, nadir_col = self.reader._get_nadir_pixel( earth_mask=earth_mask, sector=FULL_DISC) - self.assertEqual((nadir_row, nadir_col), (2, 1), - msg='Incorrect nadir pixel') + assert (nadir_row, nadir_col) == (2, 1), "Incorrect nadir pixel" def test_viscounts2radiance(self): """Test conversion from VIS counts to radiance.""" # Reference data is for detector #1 - slope = self.coefs['00_7']['slope'][0] - offset = self.coefs['00_7']['offset'][0] + slope = self.coefs["00_7"]["slope"][0] + offset = self.coefs["00_7"]["offset"][0] counts = xr.DataArray([0, 100, 200, 500, 1000, 1023]) rad_expected = xr.DataArray( [0., 41.54896, 100.06862, 275.6276, 568.2259, 581.685422]) rad = self.reader._viscounts2radiance(counts=counts, slope=slope, offset=offset) - self.assertTrue(np.allclose(rad.data, rad_expected.data, atol=1E-6), - msg='Incorrect conversion from VIS counts to ' - 'radiance') + assert np.allclose(rad.data, rad_expected.data, atol=1e-06), "Incorrect conversion from VIS counts to radiance" def test_ircounts2radiance(self): """Test conversion from IR counts to radiance.""" @@ -115,10 +112,10 @@ def test_ircounts2radiance(self): # Reference Radiance from NOAA lookup tables (same for detectors 1 and # 2, see [IR]) rad_expected = { - '03_9': np.array([0, 0.140, 1.899, 4.098, 4.199]), - '06_5': np.array([0, 1.825, 12.124, 24.998, 25.590]), - '10_7': np.array([0, 16.126, 92.630, 188.259, 192.658]), - '13_3': np.array([0, 15.084, 87.421, 177.842, 182.001]) + "03_9": np.array([0, 0.140, 1.899, 4.098, 4.199]), + "06_5": np.array([0, 1.825, 12.124, 24.998, 25.590]), + "10_7": np.array([0, 16.126, 92.630, 188.259, 192.658]), + "13_3": np.array([0, 15.084, 87.421, 177.842, 182.001]) } # The input counts are exact, but the accuracy of the output radiance is @@ -128,64 +125,59 @@ def test_ircounts2radiance(self): for ch in sorted(rad_expected.keys()): coefs = self.coefs[ch] rad = self.reader._ircounts2radiance( - counts=counts, scale=coefs['scale'], offset=coefs['offset']) - self.assertTrue(np.allclose(rad.data, rad_expected[ch], atol=atol), - msg='Incorrect conversion from IR counts to ' - 'radiance in channel {}'.format(ch)) + counts=counts, scale=coefs["scale"], offset=coefs["offset"]) + assert np.allclose(rad.data, rad_expected[ch], atol=atol), \ + "Incorrect conversion from IR counts to radiance in channel {}".format(ch) def test_calibrate_vis(self): """Test VIS calibration.""" rad = xr.DataArray([0, 1, 10, 100, 500]) refl_expected = xr.DataArray([0., 0.188852, 1.88852, 18.8852, 94.426]) 
refl = self.reader._calibrate_vis(radiance=rad, - k=self.coefs['00_7']['k']) - self.assertTrue(np.allclose(refl.data, refl_expected.data, atol=1E-6), - msg='Incorrect conversion from radiance to ' - 'reflectance') + k=self.coefs["00_7"]["k"]) + assert np.allclose(refl.data, refl_expected.data, atol=1e-06), \ + "Incorrect conversion from radiance to reflectance" def test_calibrate_ir(self): """Test IR calibration.""" # Test radiance values and corresponding BT from NOAA lookup tables # rev. H (see [IR]). rad = { - '03_9': xr.DataArray([0, 0.1, 2, 3.997, 4.199]), - '06_5': xr.DataArray([0, 0.821, 12.201, 25.590, 100]), - '10_7': xr.DataArray([0, 11.727, 101.810, 189.407, 192.658]), - '13_3': xr.DataArray([0, 22.679, 90.133, 182.001, 500]) + "03_9": xr.DataArray([0, 0.1, 2, 3.997, 4.199]), + "06_5": xr.DataArray([0, 0.821, 12.201, 25.590, 100]), + "10_7": xr.DataArray([0, 11.727, 101.810, 189.407, 192.658]), + "13_3": xr.DataArray([0, 22.679, 90.133, 182.001, 500]) } bt_expected = { - '03_9': np.array([[np.nan, 253.213, 319.451, 339.983, np.nan], + "03_9": np.array([[np.nan, 253.213, 319.451, 339.983, np.nan], [np.nan, 253.213, 319.451, 339.983, np.nan]]), - '06_5': np.array([[np.nan, 200.291, 267.860, 294.988, np.nan], + "06_5": np.array([[np.nan, 200.291, 267.860, 294.988, np.nan], [np.nan, 200.308, 267.879, 295.008, np.nan]]), - '10_7': np.array([[np.nan, 200.105, 294.437, 339.960, np.nan], + "10_7": np.array([[np.nan, 200.105, 294.437, 339.960, np.nan], [np.nan, 200.097, 294.429, 339.953, np.nan]]), - '13_3': np.array([[np.nan, 200.006, 267.517, 321.986, np.nan], + "13_3": np.array([[np.nan, 200.006, 267.517, 321.986, np.nan], [np.nan, 200.014, 267.524, 321.990, np.nan]]) } # first row is for detector 1, second for detector 2. # The accuracy of the input radiance is limited to 3 digits so that # the results differ slightly. 
- atol = {'03_9': 0.04, '06_5': 0.03, '10_7': 0.01, '13_3': 0.01} + atol = {"03_9": 0.04, "06_5": 0.03, "10_7": 0.01, "13_3": 0.01} for ch in sorted(rad.keys()): coefs = self.coefs[ch] for det in [0, 1]: bt = self.reader._calibrate_ir(radiance=rad[ch], - coefs={'a': coefs['a'][det], - 'b': coefs['b'][det], - 'n': coefs['n'][det], - 'btmin': coefs['btmin'], - 'btmax': coefs['btmax']}) - self.assertTrue( - np.allclose(bt.data, bt_expected[ch][det], equal_nan=True, - atol=atol[ch]), - msg='Incorrect conversion from radiance to brightness ' - 'temperature in channel {} detector {}'.format(ch, det)) + coefs={"a": coefs["a"][det], + "b": coefs["b"][det], + "n": coefs["n"][det], + "btmin": coefs["btmin"], + "btmax": coefs["btmax"]}) + assert np.allclose(bt.data, bt_expected[ch][det], equal_nan=True, atol=atol[ch]), \ + f"Incorrect conversion from radiance to brightness temperature in channel {ch} detector {det}" def test_start_time(self): """Test dataset start time stamp.""" - self.assertEqual(self.reader.start_time, self.time) + assert self.reader.start_time == self.time def test_end_time(self): """Test dataset end time stamp.""" @@ -196,7 +188,7 @@ def test_end_time(self): } for sector, end_time in expected.items(): self.reader.sector = sector - self.assertEqual(self.reader.end_time, end_time) + assert self.reader.end_time == end_time class TestMetadata: @@ -217,7 +209,7 @@ def _apply_yaw_flip(self, data_array, yaw_flip): data_array.data = np.flipud(data_array.data) return data_array - @pytest.fixture + @pytest.fixture() def lons_lats(self, yaw_flip): """Get longitudes and latitudes.""" lon = xr.DataArray( @@ -235,7 +227,7 @@ def lons_lats(self, yaw_flip): self._apply_yaw_flip(lat, yaw_flip) return lon, lat - @pytest.fixture + @pytest.fixture() def dataset(self, lons_lats, channel_id): """Create a fake dataset.""" lon, lat = lons_lats @@ -252,16 +244,16 @@ def dataset(self, lons_lats, channel_id): bands = xr.DataArray([channel_id], dims="bands") return xr.Dataset( { - 'data': data, - 'lon': lon, - 'lat': lat, - 'time': time, - 'bands': bands, + "data": data, + "lon": lon, + "lat": lat, + "time": time, + "bands": bands, }, - attrs={'Satellite Sensor': 'G-15'} + attrs={"Satellite Sensor": "G-15"} ) - @pytest.fixture + @pytest.fixture() def earth_mask(self, yaw_flip): """Get expected earth mask.""" earth_mask = xr.DataArray( @@ -273,7 +265,7 @@ def earth_mask(self, yaw_flip): self._apply_yaw_flip(earth_mask, yaw_flip) return earth_mask - @pytest.fixture + @pytest.fixture() def geometry(self, channel_id, yaw_flip): """Get expected geometry.""" shapes = { @@ -286,20 +278,20 @@ def geometry(self, channel_id, yaw_flip): "shape": shapes[channel_id] } - @pytest.fixture + @pytest.fixture() def expected(self, geometry, earth_mask, yaw_flip): """Define expected metadata.""" proj_dict = { - 'a': '6378169', - 'h': '35785831', - 'lon_0': '0', - 'no_defs': 'None', - 'proj': 'geos', - 'rf': '295.488065897001', - 'type': 'crs', - 'units': 'm', - 'x_0': '0', - 'y_0': '0' + "a": "6378169", + "h": "35785831", + "lon_0": "0", + "no_defs": "None", + "proj": "geos", + "rf": "295.488065897001", + "type": "crs", + "units": "m", + "x_0": "0", + "y_0": "0" } area = AreaDefinition( area_id="goes_geos_uniform", @@ -319,7 +311,7 @@ def expected(self, geometry, earth_mask, yaw_flip): "nadir_col": 1 } - @pytest.fixture + @pytest.fixture() def mocked_file_handler(self, dataset): """Mock file handler to load the given fake dataset.""" from satpy.readers.goes_imager_nc import FULL_DISC, GOESNCFileHandler @@ -329,7 +321,7 @@ def 
mocked_file_handler(self, dataset): GOESNCFileHandler.ir_sectors[(3, 4)] = FULL_DISC GOESNCFileHandler.yaw_flip_sampling_distance = 1 return GOESNCFileHandler( - filename='dummy', + filename="dummy", filename_info={}, filetype_info={}, ) @@ -351,12 +343,12 @@ class GOESNCFileHandlerTest(unittest.TestCase): longMessage = True - @mock.patch('satpy.readers.goes_imager_nc.xr') + @mock.patch("satpy.readers.goes_imager_nc.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESNCFileHandler - self.coefs = CALIB_COEFS['GOES-15'] + self.coefs = CALIB_COEFS["GOES-15"] self.all_coefs = CALIB_COEFS self.channels = sorted(self.coefs.keys()) self.ir_channels = sorted([ch for ch in self.channels @@ -374,114 +366,110 @@ def setUp(self, xr_): nrows, ncols) # Includes invalid values to be masked xr_.open_dataset.return_value = xr.Dataset( - {'data': xr.DataArray(data=self.counts, dims=('time', 'yc', 'xc')), - 'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')), - 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc')), - 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'), - dims=('time',)), - 'bands': xr.DataArray(data=np.array([1]))}, - attrs={'Satellite Sensor': 'G-15'}) + {"data": xr.DataArray(data=self.counts, dims=("time", "yc", "xc")), + "lon": xr.DataArray(data=self.lon, dims=("yc", "xc")), + "lat": xr.DataArray(data=self.lat, dims=("yc", "xc")), + "time": xr.DataArray(data=np.array([0], dtype="datetime64[ms]"), + dims=("time",)), + "bands": xr.DataArray(data=np.array([1]))}, + attrs={"Satellite Sensor": "G-15"}) # Instantiate reader using the mocked open_dataset() method - self.reader = GOESNCFileHandler(filename='dummy', filename_info={}, + self.reader = GOESNCFileHandler(filename="dummy", filename_info={}, filetype_info={}) def test_get_dataset_coords(self): """Test whether coordinates returned by get_dataset() are correct.""" - lon = self.reader.get_dataset(key=make_dataid(name='longitude'), + lon = self.reader.get_dataset(key=make_dataid(name="longitude"), info={}) - lat = self.reader.get_dataset(key=make_dataid(name='latitude'), + lat = self.reader.get_dataset(key=make_dataid(name="latitude"), info={}) # ... 
this only compares the valid (unmasked) elements - self.assertTrue(np.all(lat.to_masked_array() == self.lat), - msg='get_dataset() returns invalid latitude') - self.assertTrue(np.all(lon.to_masked_array() == self.lon), - msg='get_dataset() returns invalid longitude') + assert np.all(lat.to_masked_array() == self.lat), "get_dataset() returns invalid latitude" + assert np.all(lon.to_masked_array() == self.lon), "get_dataset() returns invalid longitude" def test_get_dataset_counts(self): """Test whether counts returned by get_dataset() are correct.""" from satpy.readers.goes_imager_nc import ALTITUDE, UNKNOWN_SECTOR - self.reader.meta.update({'lon0': -75.0, - 'lat0': 0.0, - 'sector': UNKNOWN_SECTOR, - 'nadir_row': 1, - 'nadir_col': 2, - 'area_def_uni': 'some_area'}) - attrs_exp = {'orbital_parameters': {'projection_longitude': -75.0, - 'projection_latitude': 0.0, - 'projection_altitude': ALTITUDE, - 'yaw_flip': True}, - 'platform_name': 'GOES-15', - 'sensor': 'goes_imager', - 'sector': UNKNOWN_SECTOR, - 'nadir_row': 1, - 'nadir_col': 2, - 'area_def_uniform_sampling': 'some_area'} + self.reader.meta.update({"lon0": -75.0, + "lat0": 0.0, + "sector": UNKNOWN_SECTOR, + "nadir_row": 1, + "nadir_col": 2, + "area_def_uni": "some_area"}) + attrs_exp = {"orbital_parameters": {"projection_longitude": -75.0, + "projection_latitude": 0.0, + "projection_altitude": ALTITUDE, + "yaw_flip": True}, + "platform_name": "GOES-15", + "sensor": "goes_imager", + "sector": UNKNOWN_SECTOR, + "nadir_row": 1, + "nadir_col": 2, + "area_def_uniform_sampling": "some_area"} for ch in self.channels: counts = self.reader.get_dataset( - key=make_dataid(name=ch, calibration='counts'), info={}) + key=make_dataid(name=ch, calibration="counts"), info={}) # ... this only compares the valid (unmasked) elements - self.assertTrue(np.all(self.counts/32. == counts.to_masked_array()), - msg='get_dataset() returns invalid counts for ' - 'channel {}'.format(ch)) + assert np.all(self.counts / 32.0 == counts.to_masked_array()), \ + f"get_dataset() returns invalid counts for channel {ch}" # Check attributes - self.assertDictEqual(counts.attrs, attrs_exp) + assert counts.attrs == attrs_exp def test_get_dataset_masks(self): """Test whether data and coordinates are masked consistently.""" # Requires that no element has been masked due to invalid # radiance/reflectance/BT (see setUp()). 
- lon = self.reader.get_dataset(key=make_dataid(name='longitude'), + lon = self.reader.get_dataset(key=make_dataid(name="longitude"), info={}) lon_mask = lon.to_masked_array().mask for ch in self.channels: - for calib in ('counts', 'radiance', 'reflectance', - 'brightness_temperature'): + for calib in ("counts", "radiance", "reflectance", + "brightness_temperature"): try: data = self.reader.get_dataset( key=make_dataid(name=ch, calibration=calib), info={}) except ValueError: continue data_mask = data.to_masked_array().mask - self.assertTrue(np.all(data_mask == lon_mask), - msg='get_dataset() returns inconsistently ' - 'masked {} in channel {}'.format(calib, ch)) + assert np.all(data_mask == lon_mask), \ + f"get_dataset() returns inconsistently masked {calib} in channel {ch}" def test_get_dataset_invalid(self): """Test handling of invalid calibrations.""" # VIS -> BT - args = dict(key=make_dataid(name='00_7', - calibration='brightness_temperature'), + args = dict(key=make_dataid(name="00_7", + calibration="brightness_temperature"), info={}) self.assertRaises(ValueError, self.reader.get_dataset, **args) # IR -> Reflectance - args = dict(key=make_dataid(name='10_7', - calibration='reflectance'), + args = dict(key=make_dataid(name="10_7", + calibration="reflectance"), info={}) self.assertRaises(ValueError, self.reader.get_dataset, **args) # Unsupported calibration - with pytest.raises(ValueError): - args = dict(key=make_dataid(name='10_7', - calibration='invalid'), - info={}) + with pytest.raises(ValueError, match="invalid invalid value for "): + _ = dict(key=make_dataid(name="10_7", + calibration="invalid"), + info={}) def test_calibrate(self): """Test whether the correct calibration methods are called.""" for ch in self.channels: if is_vis_channel(ch): - calibs = {'radiance': '_viscounts2radiance', - 'reflectance': '_calibrate_vis'} + calibs = {"radiance": "_viscounts2radiance", + "reflectance": "_calibrate_vis"} else: - calibs = {'radiance': '_ircounts2radiance', - 'brightness_temperature': '_calibrate_ir'} + calibs = {"radiance": "_ircounts2radiance", + "brightness_temperature": "_calibrate_ir"} for calib, method in calibs.items(): with mock.patch.object(self.reader, method) as target_func: - self.reader.calibrate(counts=self.reader.nc['data'], + self.reader.calibrate(counts=self.reader.nc["data"], calibration=calib, channel=ch) target_func.assert_called() @@ -515,20 +503,19 @@ def test_get_sector(self): shapes.update(shapes_vis) for (nlines, ncols), sector_ref in shapes.items(): if (nlines, ncols) in shapes_vis: - channel = '00_7' + channel = "00_7" else: - channel = '10_7' + channel = "10_7" sector = self.reader._get_sector(channel=channel, nlines=nlines, ncols=ncols) - self.assertEqual(sector, sector_ref, - msg='Incorrect sector identification') + assert sector == sector_ref, "Incorrect sector identification" class TestChannelIdentification: """Test identification of channel type.""" @pytest.mark.parametrize( - "channel_name,expected", + ("channel_name", "expected"), [ ("00_7", True), ("10_7", False), @@ -542,5 +529,5 @@ def test_is_vis_channel(self, channel_name, expected): def test_invalid_channel(self): """Test handling of invalid channel type.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid channel"): is_vis_channel({"foo": "bar"}) diff --git a/satpy/tests/reader_tests/test_gpm_imerg.py b/satpy/tests/reader_tests/test_gpm_imerg.py index a91f6d300f..508be247d5 100644 --- a/satpy/tests/reader_tests/test_gpm_imerg.py +++ 
b/satpy/tests/reader_tests/test_gpm_imerg.py @@ -41,31 +41,31 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): def _get_geo_data(self, num_rows, num_cols): geo = { - 'Grid/lon': + "Grid/lon": xr.DataArray(DEFAULT_LON_DATA, - attrs={'units': 'degrees_east', }, - dims=('lon')), - 'Grid/lat': + attrs={"units": "degrees_east", }, + dims=("lon")), + "Grid/lat": xr.DataArray(DEFAULT_LAT_DATA, - attrs={'units': 'degrees_north', }, - dims=('lat')), + attrs={"units": "degrees_north", }, + dims=("lat")), } return geo def _get_precip_data(self, num_rows, num_cols): selection = { - 'Grid/IRprecipitation': + "Grid/IRprecipitation": xr.DataArray( da.ones((1, num_cols, num_rows), chunks=1024, dtype=np.float32), attrs={ - '_FillValue': -9999.9, - 'units': 'mm/hr', - 'Units': 'mm/hr', - 'badval': h5py.h5r.Reference(), - 'badvals': np.array([[h5py.h5r.Reference()]]) + "_FillValue": -9999.9, + "units": "mm/hr", + "Units": "mm/hr", + "badval": h5py.h5r.Reference(), + "badvals": np.array([[h5py.h5r.Reference()]]) }, - dims=('time', 'lon', 'lat')), + dims=("time", "lon", "lat")), } return selection @@ -93,9 +93,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.gpm_imerg import Hdf5IMERG - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(Hdf5IMERG, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(Hdf5IMERG, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -109,33 +109,27 @@ def test_load_data(self): # Filename to test, needed for start and end times filenames = [ - '3B-HHR.MS.MRG.3IMERG.20200131-S233000-E235959.1410.V06B.HDF5', ] + "3B-HHR.MS.MRG.3IMERG.20200131-S233000-E235959.1410.V06B.HDF5", ] # Expected projection in area def - pdict = {'proj': 'longlat', - 'datum': 'WGS84', - 'no_defs': None, - 'type': 'crs'} + pdict = {"proj": "longlat", + "datum": "WGS84", + "no_defs": None, + "type": "crs"} reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) - self.assertEqual(1, len(files)) + assert 1 == len(files) reader.create_filehandlers(files) # Make sure we have some files - self.assertTrue(reader.file_handlers) - res = reader.load(['IRprecipitation']) - self.assertEqual(1, len(res)) - self.assertEqual(res['IRprecipitation'].start_time, - datetime(2020, 1, 31, 23, 30, 0)) - self.assertEqual(res['IRprecipitation'].end_time, - datetime(2020, 1, 31, 23, 59, 59)) - self.assertEqual(res['IRprecipitation'].resolution, - 0.1) - self.assertEqual(res['IRprecipitation'].area.width, - 3600) - self.assertEqual(res['IRprecipitation'].area.height, - 1800) - self.assertEqual(res['IRprecipitation'].area.proj_dict, - pdict) - np.testing.assert_almost_equal(res['IRprecipitation'].area.area_extent, + assert reader.file_handlers + res = reader.load(["IRprecipitation"]) + assert 1 == len(res) + assert res["IRprecipitation"].start_time == datetime(2020, 1, 31, 23, 30, 0) + assert res["IRprecipitation"].end_time == datetime(2020, 1, 31, 23, 59, 59) + assert res["IRprecipitation"].resolution == 0.1 + assert res["IRprecipitation"].area.width == 3600 + assert res["IRprecipitation"].area.height == 1800 + assert res["IRprecipitation"].area.proj_dict == pdict + 
np.testing.assert_almost_equal(res["IRprecipitation"].area.area_extent, (-179.95, -89.95, 179.95, 89.95), 5) diff --git a/satpy/tests/reader_tests/test_grib.py b/satpy/tests/reader_tests/test_grib.py index b349e91169..dce6b3f557 100644 --- a/satpy/tests/reader_tests/test_grib.py +++ b/satpy/tests/reader_tests/test_grib.py @@ -28,14 +28,14 @@ from satpy.dataset import DataQuery # Parameterized cases -TEST_ARGS = ('proj_params', 'lon_corners', 'lat_corners') +TEST_ARGS = ("proj_params", "lon_corners", "lat_corners") TEST_PARAMS = ( (None, None, None), # cyl default case ( { - 'a': 6371229, 'b': 6371229, 'proj': 'lcc', - 'lon_0': 265.0, 'lat_0': 25.0, - 'lat_1': 25.0, 'lat_2': 25.0 + "a": 6371229, "b": 6371229, "proj": "lcc", + "lon_0": 265.0, "lat_0": 25.0, + "lat_1": 25.0, "lat_2": 25.0 }, [-133.459, -65.12555139, -152.8786225, -49.41598659], [12.19, 14.34208538, 54.56534318, 57.32843565] @@ -78,7 +78,7 @@ def __init__(self, values, proj_params=None, latlons=None, **attrs): self.attrs = attrs self.values = values if proj_params is None: - proj_params = {'a': 6371229, 'b': 6371229, 'proj': 'cyl'} + proj_params = {"a": 6371229, "b": 6371229, "proj": "cyl"} self.projparams = proj_params self._latlons = latlons @@ -111,12 +111,12 @@ def __init__(self, messages=None, proj_params=None, latlons=None): self._messages = [ FakeMessage( values=fake_gribdata(), - name='TEST', - shortName='t', + name="TEST", + shortName="t", level=100, - pressureUnits='hPa', - cfName='air_temperature', - units='K', + pressureUnits="hPa", + cfName="air_temperature", + units="K", dataDate=20180504, dataTime=1200, validityDate=20180504, @@ -124,22 +124,22 @@ def __init__(self, messages=None, proj_params=None, latlons=None): distinctLongitudes=np.arange(5.), distinctLatitudes=np.arange(5.), missingValue=9999, - modelName='notknown', + modelName="notknown", minimum=100., maximum=200., - typeOfLevel='isobaricInhPa', + typeOfLevel="isobaricInhPa", jScansPositively=0, proj_params=proj_params, latlons=latlons, ), FakeMessage( values=fake_gribdata(), - name='TEST', - shortName='t', + name="TEST", + shortName="t", level=200, - pressureUnits='hPa', - cfName='air_temperature', - units='K', + pressureUnits="hPa", + cfName="air_temperature", + units="K", dataDate=20180504, dataTime=1200, validityDate=20180504, @@ -147,22 +147,22 @@ def __init__(self, messages=None, proj_params=None, latlons=None): distinctLongitudes=np.arange(5.), distinctLatitudes=np.arange(5.), missingValue=9999, - modelName='notknown', + modelName="notknown", minimum=100., maximum=200., - typeOfLevel='isobaricInhPa', + typeOfLevel="isobaricInhPa", jScansPositively=1, proj_params=proj_params, latlons=latlons, ), FakeMessage( values=fake_gribdata(), - name='TEST', - shortName='t', + name="TEST", + shortName="t", level=300, - pressureUnits='hPa', - cfName='air_temperature', - units='K', + pressureUnits="hPa", + cfName="air_temperature", + units="K", dataDate=20180504, dataTime=1200, validityDate=20180504, @@ -172,7 +172,7 @@ def __init__(self, messages=None, proj_params=None, latlons=None): missingValue=9999, minimum=100., maximum=200., - typeOfLevel='isobaricInhPa', + typeOfLevel="isobaricInhPa", jScansPositively=0, proj_params=proj_params, latlons=latlons, @@ -208,29 +208,29 @@ class TestGRIBReader: def setup_method(self): """Wrap pygrib to read fake data.""" from satpy._config import config_search_paths - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", 
self.yaml_file)) try: import pygrib except ImportError: pygrib = None self.orig_pygrib = pygrib - sys.modules['pygrib'] = mock.MagicMock() + sys.modules["pygrib"] = mock.MagicMock() def teardown_method(self): """Re-enable pygrib import.""" - sys.modules['pygrib'] = self.orig_pygrib + sys.modules["pygrib"] = self.orig_pygrib def _get_test_datasets(self, dataids, fake_pygrib=None): from satpy.readers import load_reader if fake_pygrib is None: fake_pygrib = FakeGRIB() - with mock.patch('satpy.readers.grib.pygrib') as pg: + with mock.patch("satpy.readers.grib.pygrib") as pg: pg.open.return_value = fake_pygrib r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'gfs.t18z.sfluxgrbf106.grib2', + "gfs.t18z.sfluxgrbf106.grib2", ]) r.create_filehandlers(loadables) datasets = r.load(dataids) @@ -262,11 +262,11 @@ def _get_fake_pygrib(proj_params, lon_corners, lat_corners): def test_init(self): """Test basic init with no extra parameters.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.grib.pygrib') as pg: + with mock.patch("satpy.readers.grib.pygrib") as pg: pg.open.return_value = FakeGRIB() r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'gfs.t18z.sfluxgrbf106.grib2', + "gfs.t18z.sfluxgrbf106.grib2", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) @@ -293,25 +293,25 @@ def test_load_all(self, proj_params, lon_corners, lat_corners): """Test loading all test datasets.""" fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) dataids = [ - DataQuery(name='t', level=100, modifiers=tuple()), - DataQuery(name='t', level=200, modifiers=tuple()), - DataQuery(name='t', level=300, modifiers=tuple()) + DataQuery(name="t", level=100, modifiers=tuple()), + DataQuery(name="t", level=200, modifiers=tuple()), + DataQuery(name="t", level=300, modifiers=tuple()) ] datasets = self._get_test_datasets(dataids, fake_pygrib) assert len(datasets) == 3 for v in datasets.values(): - assert v.attrs['units'] == 'K' + assert v.attrs["units"] == "K" assert isinstance(v, xr.DataArray) @pytest.mark.parametrize(TEST_ARGS, TEST_PARAMS) def test_area_def_crs(self, proj_params, lon_corners, lat_corners): """Check that the projection is accurate.""" fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) - dataids = [DataQuery(name='t', level=100, modifiers=tuple())] + dataids = [DataQuery(name="t", level=100, modifiers=tuple())] datasets = self._get_test_datasets(dataids, fake_pygrib) - area = datasets['t'].attrs['area'] - if not hasattr(area, 'crs'): + area = datasets["t"].attrs["area"] + if not hasattr(area, "crs"): pytest.skip("Can't test with pyproj < 2.0") _round_trip_projection_lonlat_check(area) @@ -321,12 +321,12 @@ def test_missing_attributes(self, proj_params, lon_corners, lat_corners): fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) # This has modelName - query_contains = DataQuery(name='t', level=100, modifiers=tuple()) + query_contains = DataQuery(name="t", level=100, modifiers=tuple()) # This does not have modelName - query_not_contains = DataQuery(name='t', level=300, modifiers=tuple()) + query_not_contains = DataQuery(name="t", level=300, modifiers=tuple()) dataset = self._get_test_datasets([query_contains, query_not_contains], fake_pygrib) - assert dataset[query_contains].attrs['modelName'] == 'notknown' - assert dataset[query_not_contains].attrs['modelName'] == 'unknown' + assert dataset[query_contains].attrs["modelName"] == "notknown" + assert 
dataset[query_not_contains].attrs["modelName"] == "unknown" @pytest.mark.parametrize(TEST_ARGS, TEST_PARAMS) def test_jscanspositively(self, proj_params, lon_corners, lat_corners): @@ -334,9 +334,9 @@ def test_jscanspositively(self, proj_params, lon_corners, lat_corners): fake_pygrib = self._get_fake_pygrib(proj_params, lon_corners, lat_corners) # This has no jScansPositively - query_not_contains = DataQuery(name='t', level=100, modifiers=tuple()) + query_not_contains = DataQuery(name="t", level=100, modifiers=tuple()) # This contains jScansPositively - query_contains = DataQuery(name='t', level=200, modifiers=tuple()) + query_contains = DataQuery(name="t", level=200, modifiers=tuple()) dataset = self._get_test_datasets([query_contains, query_not_contains], fake_pygrib) np.testing.assert_allclose(fake_gribdata(), dataset[query_not_contains].values) diff --git a/satpy/tests/reader_tests/test_hdf4_utils.py b/satpy/tests/reader_tests/test_hdf4_utils.py index 9a0773c2c1..da75113439 100644 --- a/satpy/tests/reader_tests/test_hdf4_utils.py +++ b/satpy/tests/reader_tests/test_hdf4_utils.py @@ -67,20 +67,20 @@ class TestHDF4FileHandler(unittest.TestCase): def setUp(self): """Create a test HDF4 file.""" from pyhdf.SD import SD, SDC - h = SD('test.hdf', SDC.WRITE | SDC.CREATE | SDC.TRUNC) + h = SD("test.hdf", SDC.WRITE | SDC.CREATE | SDC.TRUNC) data = np.arange(10. * 100, dtype=np.float32).reshape((10, 100)) - v1 = h.create('ds1_f', SDC.FLOAT32, (10, 100)) + v1 = h.create("ds1_f", SDC.FLOAT32, (10, 100)) v1[:] = data - v2 = h.create('ds1_i', SDC.INT16, (10, 100)) + v2 = h.create("ds1_i", SDC.INT16, (10, 100)) v2[:] = data.astype(np.int16) # Add attributes - h.test_attr_str = 'test_string' + h.test_attr_str = "test_string" h.test_attr_int = 0 h.test_attr_float = 1.2 # h.test_attr_str_arr = np.array(b"test_string2") for d in [v1, v2]: - d.test_attr_str = 'test_string' + d.test_attr_str = "test_string" d.test_attr_int = 0 d.test_attr_float = 1.2 @@ -88,34 +88,34 @@ def setUp(self): def tearDown(self): """Remove the previously created test file.""" - os.remove('test.hdf') + os.remove("test.hdf") def test_all_basic(self): """Test everything about the HDF4 class.""" from satpy.readers.hdf4_utils import HDF4FileHandler - file_handler = HDF4FileHandler('test.hdf', {}, {}) + file_handler = HDF4FileHandler("test.hdf", {}, {}) - for ds in ('ds1_f', 'ds1_i'): - self.assertEqual(file_handler[ds + '/dtype'], np.float32 if ds.endswith('f') else np.int16) - self.assertTupleEqual(file_handler[ds + '/shape'], (10, 100)) + for ds in ("ds1_f", "ds1_i"): + assert file_handler[ds + "/dtype"] == (np.float32 if ds.endswith("f") else np.int16) + assert file_handler[ds + "/shape"] == (10, 100) # make sure that the dtype is an instance, not the class - self.assertEqual(file_handler[ds].dtype.itemsize, 4 if ds.endswith('f') else 2) + assert file_handler[ds].dtype.itemsize == (4 if ds.endswith("f") else 2) attrs = file_handler[ds].attrs - self.assertEqual(attrs.get('test_attr_str'), 'test_string') - self.assertEqual(attrs.get('test_attr_int'), 0) - self.assertEqual(attrs.get('test_attr_float'), 1.2) + assert attrs.get("test_attr_str") == "test_string" + assert attrs.get("test_attr_int") == 0 + assert attrs.get("test_attr_float") == 1.2 - self.assertIsInstance(file_handler['/attr/test_attr_str'], str) - self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') + assert isinstance(file_handler["/attr/test_attr_str"], str) + assert file_handler["/attr/test_attr_str"] == "test_string" # 
self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') - self.assertIsInstance(file_handler['/attr/test_attr_int'], int) - self.assertEqual(file_handler['/attr/test_attr_int'], 0) - self.assertIsInstance(file_handler['/attr/test_attr_float'], float) - self.assertEqual(file_handler['/attr/test_attr_float'], 1.2) + assert isinstance(file_handler["/attr/test_attr_int"], int) + assert file_handler["/attr/test_attr_int"] == 0 + assert isinstance(file_handler["/attr/test_attr_float"], float) + assert file_handler["/attr/test_attr_float"] == 1.2 - self.assertIsInstance(file_handler.get('ds1_f'), xr.DataArray) - self.assertIsNone(file_handler.get('fake_ds')) - self.assertEqual(file_handler.get('fake_ds', 'test'), 'test') + assert isinstance(file_handler.get("ds1_f"), xr.DataArray) + assert file_handler.get("fake_ds") is None + assert file_handler.get("fake_ds", "test") == "test" - self.assertTrue('ds1_f' in file_handler) - self.assertFalse('fake_ds' in file_handler) + assert "ds1_f" in file_handler + assert "fake_ds" not in file_handler diff --git a/satpy/tests/reader_tests/test_hdf5_utils.py b/satpy/tests/reader_tests/test_hdf5_utils.py index 2c5fd2d19a..4882701c94 100644 --- a/satpy/tests/reader_tests/test_hdf5_utils.py +++ b/satpy/tests/reader_tests/test_hdf5_utils.py @@ -67,86 +67,86 @@ class TestHDF5FileHandler(unittest.TestCase): def setUp(self): """Create a test HDF5 file.""" import h5py - h = h5py.File('test.h5', 'w') + h = h5py.File("test.h5", "w") # Create Group - g1 = h.create_group('test_group') + g1 = h.create_group("test_group") # Add datasets - ds1_f = g1.create_dataset('ds1_f', + ds1_f = g1.create_dataset("ds1_f", shape=(10, 100), dtype=np.float32, data=np.arange(10. * 100).reshape((10, 100))) - ds1_i = g1.create_dataset('ds1_i', + ds1_i = g1.create_dataset("ds1_i", shape=(10, 100), dtype=np.int32, data=np.arange(10 * 100).reshape((10, 100))) - ds2_f = h.create_dataset('ds2_f', + ds2_f = h.create_dataset("ds2_f", shape=(10, 100), dtype=np.float32, data=np.arange(10. 
* 100).reshape((10, 100))) - ds2_i = h.create_dataset('ds2_i', + ds2_i = h.create_dataset("ds2_i", shape=(10, 100), dtype=np.int32, data=np.arange(10 * 100).reshape((10, 100))) # Add attributes # shows up as a scalar array of bytes (shape=(), size=1) - h.attrs['test_attr_str'] = 'test_string' - h.attrs['test_attr_byte'] = b'test_byte' - h.attrs['test_attr_int'] = 0 - h.attrs['test_attr_float'] = 1.2 + h.attrs["test_attr_str"] = "test_string" + h.attrs["test_attr_byte"] = b"test_byte" + h.attrs["test_attr_int"] = 0 + h.attrs["test_attr_float"] = 1.2 # shows up as a numpy bytes object - h.attrs['test_attr_str_arr'] = np.array(b"test_string2") - g1.attrs['test_attr_str'] = 'test_string' - g1.attrs['test_attr_byte'] = b'test_byte' - g1.attrs['test_attr_int'] = 0 - g1.attrs['test_attr_float'] = 1.2 + h.attrs["test_attr_str_arr"] = np.array(b"test_string2") + g1.attrs["test_attr_str"] = "test_string" + g1.attrs["test_attr_byte"] = b"test_byte" + g1.attrs["test_attr_int"] = 0 + g1.attrs["test_attr_float"] = 1.2 for d in [ds1_f, ds1_i, ds2_f, ds2_i]: - d.attrs['test_attr_str'] = 'test_string' - d.attrs['test_attr_byte'] = b'test_byte' - d.attrs['test_attr_int'] = 0 - d.attrs['test_attr_float'] = 1.2 - d.attrs['test_ref'] = d.ref + d.attrs["test_attr_str"] = "test_string" + d.attrs["test_attr_byte"] = b"test_byte" + d.attrs["test_attr_int"] = 0 + d.attrs["test_attr_float"] = 1.2 + d.attrs["test_ref"] = d.ref self.var_attrs = list(d.attrs.keys()) h.close() def tearDown(self): """Remove the previously created test file.""" - os.remove('test.h5') + os.remove("test.h5") def test_all_basic(self): """Test everything about the HDF5 class.""" import xarray as xr from satpy.readers.hdf5_utils import HDF5FileHandler - file_handler = HDF5FileHandler('test.h5', {}, {}) + file_handler = HDF5FileHandler("test.h5", {}, {}) - for ds_name in ('test_group/ds1_f', 'test_group/ds1_i', 'ds2_f', 'ds2_i'): + for ds_name in ("test_group/ds1_f", "test_group/ds1_i", "ds2_f", "ds2_i"): ds = file_handler[ds_name] attrs = ds.attrs - self.assertEqual(ds.dtype, np.float32 if ds_name.endswith('f') else np.int32) - self.assertTupleEqual(file_handler[ds_name + '/shape'], (10, 100)) - self.assertEqual(attrs['test_attr_str'], 'test_string') - self.assertEqual(attrs['test_attr_byte'], 'test_byte') - self.assertEqual(attrs['test_attr_int'], 0) - self.assertEqual(attrs['test_attr_float'], 1.2) - self.assertEqual(file_handler[ds_name + '/attr/test_attr_str'], 'test_string') - self.assertEqual(file_handler[ds_name + '/attr/test_attr_byte'], 'test_byte') - self.assertEqual(file_handler[ds_name + '/attr/test_attr_int'], 0) - self.assertEqual(file_handler[ds_name + '/attr/test_attr_float'], 1.2) - - self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') - self.assertEqual(file_handler['/attr/test_attr_byte'], 'test_byte') - self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') - self.assertEqual(file_handler['/attr/test_attr_int'], 0) - self.assertEqual(file_handler['/attr/test_attr_float'], 1.2) - - self.assertIsInstance(file_handler.get('ds2_f'), xr.DataArray) - self.assertIsNone(file_handler.get('fake_ds')) - self.assertEqual(file_handler.get('fake_ds', 'test'), 'test') - - self.assertTrue('ds2_f' in file_handler) - self.assertFalse('fake_ds' in file_handler) - - self.assertIsInstance(file_handler['ds2_f/attr/test_ref'], np.ndarray) + assert ds.dtype == (np.float32 if ds_name.endswith("f") else np.int32) + assert file_handler[ds_name + "/shape"] == (10, 100) + assert attrs["test_attr_str"] == 
"test_string" + assert attrs["test_attr_byte"] == "test_byte" + assert attrs["test_attr_int"] == 0 + assert attrs["test_attr_float"] == 1.2 + assert file_handler[ds_name + "/attr/test_attr_str"] == "test_string" + assert file_handler[ds_name + "/attr/test_attr_byte"] == "test_byte" + assert file_handler[ds_name + "/attr/test_attr_int"] == 0 + assert file_handler[ds_name + "/attr/test_attr_float"] == 1.2 + + assert file_handler["/attr/test_attr_str"] == "test_string" + assert file_handler["/attr/test_attr_byte"] == "test_byte" + assert file_handler["/attr/test_attr_str_arr"] == "test_string2" + assert file_handler["/attr/test_attr_int"] == 0 + assert file_handler["/attr/test_attr_float"] == 1.2 + + assert isinstance(file_handler.get("ds2_f"), xr.DataArray) + assert file_handler.get("fake_ds") is None + assert file_handler.get("fake_ds", "test") == "test" + + assert "ds2_f" in file_handler + assert "fake_ds" not in file_handler + + assert isinstance(file_handler["ds2_f/attr/test_ref"], np.ndarray) diff --git a/satpy/tests/reader_tests/test_hdfeos_base.py b/satpy/tests/reader_tests/test_hdfeos_base.py index 68b8928f2e..8fcb5660ad 100644 --- a/satpy/tests/reader_tests/test_hdfeos_base.py +++ b/satpy/tests/reader_tests/test_hdfeos_base.py @@ -19,7 +19,7 @@ import unittest -nrt_mda = '''GROUP = INVENTORYMETADATA +nrt_mda = """GROUP = INVENTORYMETADATA GROUPTYPE = MASTERGROUP GROUP = ECSDATAGRANULE @@ -439,175 +439,175 @@ END_GROUP = INVENTORYMETADATA -END''' # noqa: E501 +END""" # noqa: E501 nrt_mda_dict = { - 'INVENTORYMETADATA': { - 'ADDITIONALATTRIBUTES': { - 'ADDITIONALATTRIBUTESCONTAINER': { - 'ADDITIONALATTRIBUTENAME': { - 'VALUE': 'identifier_product_doi_authority' + "INVENTORYMETADATA": { + "ADDITIONALATTRIBUTES": { + "ADDITIONALATTRIBUTESCONTAINER": { + "ADDITIONALATTRIBUTENAME": { + "VALUE": "identifier_product_doi_authority" }, - 'INFORMATIONCONTENT': { - 'PARAMETERVALUE': { - 'VALUE': 'http://dx.doi.org' + "INFORMATIONCONTENT": { + "PARAMETERVALUE": { + "VALUE": "http://dx.doi.org" } } } }, - 'ASSOCIATEDPLATFORMINSTRUMENTSENSOR': { - 'ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER': { - 'ASSOCIATEDINSTRUMENTSHORTNAME': { - 'VALUE': 'MODIS' + "ASSOCIATEDPLATFORMINSTRUMENTSENSOR": { + "ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER": { + "ASSOCIATEDINSTRUMENTSHORTNAME": { + "VALUE": "MODIS" }, - 'ASSOCIATEDPLATFORMSHORTNAME': { - 'VALUE': 'Aqua' + "ASSOCIATEDPLATFORMSHORTNAME": { + "VALUE": "Aqua" }, - 'ASSOCIATEDSENSORSHORTNAME': { - 'VALUE': 'MODIS' + "ASSOCIATEDSENSORSHORTNAME": { + "VALUE": "MODIS" } } }, - 'COLLECTIONDESCRIPTIONCLASS': { - 'SHORTNAME': { - 'VALUE': 'MYD03' + "COLLECTIONDESCRIPTIONCLASS": { + "SHORTNAME": { + "VALUE": "MYD03" }, - 'VERSIONID': { - 'VALUE': 61 + "VERSIONID": { + "VALUE": 61 } }, - 'ECSDATAGRANULE': { - 'DAYNIGHTFLAG': { - 'VALUE': 'Day' + "ECSDATAGRANULE": { + "DAYNIGHTFLAG": { + "VALUE": "Day" }, - 'LOCALGRANULEID': { - 'VALUE': 'MYD03.A2019051.1225.061.2019051131153.NRT.hdf' + "LOCALGRANULEID": { + "VALUE": "MYD03.A2019051.1225.061.2019051131153.NRT.hdf" }, - 'LOCALVERSIONID': { - 'VALUE': '6.0.4' + "LOCALVERSIONID": { + "VALUE": "6.0.4" }, - 'PRODUCTIONDATETIME': { - 'VALUE': '2019-02-20T13:11:53.000Z' + "PRODUCTIONDATETIME": { + "VALUE": "2019-02-20T13:11:53.000Z" }, - 'REPROCESSINGACTUAL': { - 'VALUE': 'Near ' - 'Real ' - 'Time' + "REPROCESSINGACTUAL": { + "VALUE": "Near " + "Real " + "Time" }, - 'REPROCESSINGPLANNED': { - 'VALUE': 'further ' - 'update ' - 'is ' - 'anticipated' + "REPROCESSINGPLANNED": { + "VALUE": "further " + "update " + "is " + 
"anticipated" } }, - 'GROUPTYPE': 'MASTERGROUP', - 'INPUTGRANULE': { - 'INPUTPOINTER': { - 'VALUE': - ('MYD01.61.2019-051T12:25:00.000000Z.NA.29878844.500100_1.hdf', - 'MYD03LUT.coeff_V6.1.4', - 'PM1EPHND_NRT.A2019051.1220.061.2019051125628', - 'PM1EPHND_NRT.A2019051.1225.061.2019051125628', - 'PM1EPHND_NRT.A2019051.1230.061.2019051125628', ' ' - 'PM1ATTNR_NRT.A2019051.1220.061.2019051125628', - 'PM1ATTNR_NRT.A2019051.1225.061.2019051125628', - 'PM1ATTNR_NRT.A2019051.1230.061.2019051125628') + "GROUPTYPE": "MASTERGROUP", + "INPUTGRANULE": { + "INPUTPOINTER": { + "VALUE": + ("MYD01.61.2019-051T12:25:00.000000Z.NA.29878844.500100_1.hdf", + "MYD03LUT.coeff_V6.1.4", + "PM1EPHND_NRT.A2019051.1220.061.2019051125628", + "PM1EPHND_NRT.A2019051.1225.061.2019051125628", + "PM1EPHND_NRT.A2019051.1230.061.2019051125628", " " + "PM1ATTNR_NRT.A2019051.1220.061.2019051125628", + "PM1ATTNR_NRT.A2019051.1225.061.2019051125628", + "PM1ATTNR_NRT.A2019051.1230.061.2019051125628") } }, - 'MEASUREDPARAMETER': { - 'MEASUREDPARAMETERCONTAINER': { - 'PARAMETERNAME': { - 'VALUE': 'Geolocation' + "MEASUREDPARAMETER": { + "MEASUREDPARAMETERCONTAINER": { + "PARAMETERNAME": { + "VALUE": "Geolocation" }, - 'QAFLAGS': { - 'AUTOMATICQUALITYFLAG': { - 'VALUE': 'Passed' + "QAFLAGS": { + "AUTOMATICQUALITYFLAG": { + "VALUE": "Passed" }, - 'AUTOMATICQUALITYFLAGEXPLANATION': { - 'VALUE': - 'Set ' - 'to ' + "AUTOMATICQUALITYFLAGEXPLANATION": { + "VALUE": + "Set " + "to " "'Failed' " - 'if ' - 'processing ' - 'error ' - 'occurred, ' - 'set ' - 'to ' + "if " + "processing " + "error " + "occurred, " + "set " + "to " "'Passed' " - 'otherwise' + "otherwise" }, - 'SCIENCEQUALITYFLAG': { - 'VALUE': 'Not ' - 'Investigated' + "SCIENCEQUALITYFLAG": { + "VALUE": "Not " + "Investigated" } }, - 'QASTATS': { - 'QAPERCENTMISSINGDATA': { - 'VALUE': 0 + "QASTATS": { + "QAPERCENTMISSINGDATA": { + "VALUE": 0 }, - 'QAPERCENTOUTOFBOUNDSDATA': { - 'VALUE': 0 + "QAPERCENTOUTOFBOUNDSDATA": { + "VALUE": 0 } } } }, - 'ORBITCALCULATEDSPATIALDOMAIN': { - 'ORBITCALCULATEDSPATIALDOMAINCONTAINER': { - 'EQUATORCROSSINGDATE': { - 'VALUE': '2019-02-20' + "ORBITCALCULATEDSPATIALDOMAIN": { + "ORBITCALCULATEDSPATIALDOMAINCONTAINER": { + "EQUATORCROSSINGDATE": { + "VALUE": "2019-02-20" }, - 'EQUATORCROSSINGLONGITUDE': { - 'VALUE': -151.260740805733 + "EQUATORCROSSINGLONGITUDE": { + "VALUE": -151.260740805733 }, - 'EQUATORCROSSINGTIME': { - 'VALUE': '12:49:52.965727' + "EQUATORCROSSINGTIME": { + "VALUE": "12:49:52.965727" }, - 'ORBITNUMBER': { - 'VALUE': 89393 + "ORBITNUMBER": { + "VALUE": 89393 } } }, - 'PGEVERSIONCLASS': { - 'PGEVERSION': { - 'VALUE': '6.1.4' + "PGEVERSIONCLASS": { + "PGEVERSION": { + "VALUE": "6.1.4" } }, - 'RANGEDATETIME': { - 'RANGEBEGINNINGDATE': { - 'VALUE': '2019-02-20' + "RANGEDATETIME": { + "RANGEBEGINNINGDATE": { + "VALUE": "2019-02-20" }, - 'RANGEBEGINNINGTIME': { - 'VALUE': '12:25:00.000000' + "RANGEBEGINNINGTIME": { + "VALUE": "12:25:00.000000" }, - 'RANGEENDINGDATE': { - 'VALUE': '2019-02-20' + "RANGEENDINGDATE": { + "VALUE": "2019-02-20" }, - 'RANGEENDINGTIME': { - 'VALUE': '12:30:00.000000' + "RANGEENDINGTIME": { + "VALUE": "12:30:00.000000" } }, - 'SPATIALDOMAINCONTAINER': { - 'HORIZONTALSPATIALDOMAINCONTAINER': { - 'GPOLYGON': { - 'GPOLYGONCONTAINER': { - 'GRING': { - 'EXCLUSIONGRINGFLAG': { - 'VALUE': 'N' + "SPATIALDOMAINCONTAINER": { + "HORIZONTALSPATIALDOMAINCONTAINER": { + "GPOLYGON": { + "GPOLYGONCONTAINER": { + "GRING": { + "EXCLUSIONGRINGFLAG": { + "VALUE": "N" } }, - 'GRINGPOINT': { - 'GRINGPOINTLATITUDE': { - 'VALUE': 
(29.5170117594673, 26.1480434828114, + "GRINGPOINT": { + "GRINGPOINTLATITUDE": { + "VALUE": (29.5170117594673, 26.1480434828114, 43.2445462598877, 47.7959787025408) }, - 'GRINGPOINTLONGITUDE': { - 'VALUE': (25.3839329817764, 1.80418778807854, + "GRINGPOINTLONGITUDE": { + "VALUE": (25.3839329817764, 1.80418778807854, -6.50842421663422, 23.0260060198343) }, - 'GRINGPOINTSEQUENCENO': { - 'VALUE': (1, 2, 3, 4) + "GRINGPOINTSEQUENCENO": { + "VALUE": (1, 2, 3, 4) } } } @@ -673,7 +673,7 @@ def test_read_mda(self): """Test reading basic metadata.""" from satpy.readers.hdfeos_base import HDFEOSBaseFileReader res = HDFEOSBaseFileReader.read_mda(nrt_mda) - self.assertDictEqual(res, nrt_mda_dict) + assert res == nrt_mda_dict def test_read_mda_geo_resolution(self): """Test reading geo resolution.""" @@ -681,8 +681,8 @@ def test_read_mda_geo_resolution(self): resolution_l1b = HDFEOSGeoReader.read_geo_resolution( HDFEOSGeoReader.read_mda(metadata_modisl1b) ) - self.assertEqual(resolution_l1b, 1000) + assert resolution_l1b == 1000 resolution_l2 = HDFEOSGeoReader.read_geo_resolution( HDFEOSGeoReader.read_mda(metadata_modisl2) ) - self.assertEqual(resolution_l2, 5000) + assert resolution_l2 == 5000 diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index f8ad241532..7edbd02329 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -41,77 +41,77 @@ class TestHRITDecompress(unittest.TestCase): def test_xrit_cmd(self): """Test running the xrit decompress command.""" - old_env = os.environ.get('XRIT_DECOMPRESS_PATH', None) + old_env = os.environ.get("XRIT_DECOMPRESS_PATH", None) - os.environ['XRIT_DECOMPRESS_PATH'] = '/path/to/my/bin' + os.environ["XRIT_DECOMPRESS_PATH"] = "/path/to/my/bin" self.assertRaises(IOError, get_xritdecompress_cmd) - os.environ['XRIT_DECOMPRESS_PATH'] = gettempdir() + os.environ["XRIT_DECOMPRESS_PATH"] = gettempdir() self.assertRaises(IOError, get_xritdecompress_cmd) with NamedTemporaryFile() as fd: - os.environ['XRIT_DECOMPRESS_PATH'] = fd.name + os.environ["XRIT_DECOMPRESS_PATH"] = fd.name fname = fd.name res = get_xritdecompress_cmd() if old_env is not None: - os.environ['XRIT_DECOMPRESS_PATH'] = old_env + os.environ["XRIT_DECOMPRESS_PATH"] = old_env else: - os.environ.pop('XRIT_DECOMPRESS_PATH') + os.environ.pop("XRIT_DECOMPRESS_PATH") - self.assertEqual(fname, res) + assert fname == res def test_xrit_outfile(self): """Test the right decompression filename is used.""" stdout = [b"Decompressed file: bla.__\n"] outfile = get_xritdecompress_outfile(stdout) - self.assertEqual(outfile, b'bla.__') + assert outfile == b"bla.__" - @mock.patch('satpy.readers.hrit_base.Popen') + @mock.patch("satpy.readers.hrit_base.Popen") def test_decompress(self, popen): """Test decompression works.""" popen.return_value.returncode = 0 popen.return_value.communicate.return_value = [b"Decompressed file: bla.__\n"] - old_env = os.environ.get('XRIT_DECOMPRESS_PATH', None) + old_env = os.environ.get("XRIT_DECOMPRESS_PATH", None) with NamedTemporaryFile() as fd: - os.environ['XRIT_DECOMPRESS_PATH'] = fd.name - res = decompress('bla.C_') + os.environ["XRIT_DECOMPRESS_PATH"] = fd.name + res = decompress("bla.C_") if old_env is not None: - os.environ['XRIT_DECOMPRESS_PATH'] = old_env + os.environ["XRIT_DECOMPRESS_PATH"] = old_env else: - os.environ.pop('XRIT_DECOMPRESS_PATH') + os.environ.pop("XRIT_DECOMPRESS_PATH") - self.assertEqual(res, os.path.join('.', 'bla.__')) + assert res == os.path.join(".", "bla.__") # 
From a compressed msg hrit file. # uncompressed data field length 17223680 # compressed data field length 1578312 -mda = {'file_type': 0, 'total_header_length': 6198, 'data_field_length': 17223680, 'number_of_bits_per_pixel': 10, - 'number_of_columns': 3712, 'number_of_lines': 464, 'compression_flag_for_data': 0, - 'projection_name': b'GEOS(+000.0) ', - 'cfac': -13642337, 'lfac': -13642337, 'coff': 1856, 'loff': 1856, - 'annotation_header': b'H-000-MSG4__-MSG4________-VIS006___-000001___-202208180730-C_', - 'cds_p_field': 64, 'timestamp': (23605, 27911151), 'GP_SC_ID': 324, - 'spectral_channel_id': 1, - 'segment_sequence_number': 1, 'planned_start_segment_number': 1, 'planned_end_segment_number': 8, - 'data_field_representation': 3, - 'image_segment_line_quality': np.array([(1, (0, 0), 1, 1, 0)] * 464, - dtype=[('line_number_in_grid', '>i4'), - ('line_mean_acquisition', [('days', '>u2'), - ('milliseconds', '>u4')]), - ('line_validity', 'u1'), - ('line_radiometric_quality', 'u1'), - ('line_geometric_quality', 'u1')]), - 'projection_parameters': {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, 'SSP_longitude': 0.0}, - 'orbital_parameters': {}} +mda = {"file_type": 0, "total_header_length": 6198, "data_field_length": 17223680, "number_of_bits_per_pixel": 10, + "number_of_columns": 3712, "number_of_lines": 464, "compression_flag_for_data": 0, + "projection_name": b"GEOS(+000.0) ", + "cfac": -13642337, "lfac": -13642337, "coff": 1856, "loff": 1856, + "annotation_header": b"H-000-MSG4__-MSG4________-VIS006___-000001___-202208180730-C_", + "cds_p_field": 64, "timestamp": (23605, 27911151), "GP_SC_ID": 324, + "spectral_channel_id": 1, + "segment_sequence_number": 1, "planned_start_segment_number": 1, "planned_end_segment_number": 8, + "data_field_representation": 3, + "image_segment_line_quality": np.array([(1, (0, 0), 1, 1, 0)] * 464, + dtype=[("line_number_in_grid", ">i4"), + ("line_mean_acquisition", [("days", ">u2"), + ("milliseconds", ">u4")]), + ("line_validity", "u1"), + ("line_radiometric_quality", "u1"), + ("line_geometric_quality", "u1")]), + "projection_parameters": {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, "SSP_longitude": 0.0}, + "orbital_parameters": {}} mda_compressed = mda.copy() mda_compressed["data_field_length"] = 1578312 -mda_compressed['compression_flag_for_data'] = 1 +mda_compressed["compression_flag_for_data"] = 1 def new_get_hd(instance, hdr_info): @@ -125,11 +125,11 @@ def new_get_hd(instance, hdr_info): def new_get_hd_compressed(instance, hdr_info): """Generate some metadata.""" instance.mda = mda.copy() - instance.mda['compression_flag_for_data'] = 1 - instance.mda['data_field_length'] = 1578312 + instance.mda["compression_flag_for_data"] = 1 + instance.mda["data_field_length"] = 1578312 -@pytest.fixture +@pytest.fixture() def stub_hrit_file(tmp_path): """Create a stub hrit file.""" filename = tmp_path / "some_hrit_file" @@ -139,21 +139,21 @@ def stub_hrit_file(tmp_path): def create_stub_hrit(filename, open_fun=open, meta=mda): """Create a stub hrit file.""" - nbits = meta['number_of_bits_per_pixel'] - lines = meta['number_of_lines'] - cols = meta['number_of_columns'] + nbits = meta["number_of_bits_per_pixel"] + lines = meta["number_of_lines"] + cols = meta["number_of_columns"] total_bits = lines * cols * nbits arr = np.random.randint(0, 256, size=int(total_bits / 8), dtype=np.uint8) with open_fun(filename, mode="wb") as fd: - fd.write(b" " * meta['total_header_length']) + fd.write(b" " * meta["total_header_length"]) bytes_data = arr.tobytes() 
fd.write(bytes_data) return filename -@pytest.fixture +@pytest.fixture() def stub_bzipped_hrit_file(tmp_path): """Create a stub bzipped hrit file.""" filename = tmp_path / "some_hrit_file.bz2" @@ -161,7 +161,7 @@ def stub_bzipped_hrit_file(tmp_path): return filename -@pytest.fixture +@pytest.fixture() def stub_gzipped_hrit_file(tmp_path): """Create a stub gzipped hrit file.""" filename = tmp_path / "some_hrit_file.gz" @@ -169,7 +169,7 @@ def stub_gzipped_hrit_file(tmp_path): return filename -@pytest.fixture +@pytest.fixture() def stub_compressed_hrit_file(tmp_path): """Create a stub compressed hrit file.""" filename = tmp_path / "some_hrit_file.C_" @@ -184,19 +184,19 @@ def setup_method(self, method): """Set up the hrit file handler for testing.""" del method - with mock.patch.object(HRITFileHandler, '_get_hd', new=new_get_hd): - self.reader = HRITFileHandler('filename', - {'platform_shortname': 'MSG3', - 'start_time': datetime(2016, 3, 3, 0, 0)}, - {'filetype': 'info'}, + with mock.patch.object(HRITFileHandler, "_get_hd", new=new_get_hd): + self.reader = HRITFileHandler("filename", + {"platform_shortname": "MSG3", + "start_time": datetime(2016, 3, 3, 0, 0)}, + {"filetype": "info"}, [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]) - self.reader.mda['cfac'] = 5 - self.reader.mda['lfac'] = 5 - self.reader.mda['coff'] = 10 - self.reader.mda['loff'] = 10 - self.reader.mda['projection_parameters']['SSP_longitude'] = 44 + self.reader.mda["cfac"] = 5 + self.reader.mda["lfac"] = 5 + self.reader.mda["coff"] = 10 + self.reader.mda["loff"] = 10 + self.reader.mda["projection_parameters"]["SSP_longitude"] = 44 def test_get_xy_from_linecol(self): """Test get_xy_from_linecol.""" @@ -220,15 +220,15 @@ def test_get_area_extent(self): def test_get_area_def(self): """Test getting an area definition.""" from pyresample.utils import proj4_radius_parameters - area = self.reader.get_area_def('VIS06') + area = self.reader.get_area_def("VIS06") proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) assert a == 6378169.0 assert b == 6356583.8 - assert proj_dict['h'] == 35785831.0 - assert proj_dict['lon_0'] == 44.0 - assert proj_dict['proj'] == 'geos' - assert proj_dict['units'] == 'm' + assert proj_dict["h"] == 35785831.0 + assert proj_dict["lon_0"] == 44.0 + assert proj_dict["proj"] == "geos" + assert proj_dict["units"] == "m" assert area.area_extent == (-77771774058.38356, -77771774058.38356, 30310525626438.438, 3720765401003.719) @@ -236,7 +236,7 @@ def test_read_band_filepath(self, stub_hrit_file): """Test reading a single band from a filepath.""" self.reader.filename = stub_hrit_file - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) assert res.compute().shape == (464, 3712) def test_read_band_FSFile(self, stub_hrit_file): @@ -247,14 +247,14 @@ def test_read_band_FSFile(self, stub_hrit_file): fs_file = fsspec.open(filename) self.reader.filename = FSFile(fs_file) - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) assert res.compute().shape == (464, 3712) def test_read_band_bzipped2_filepath(self, stub_bzipped_hrit_file): """Test reading a single band from a bzipped file.""" self.reader.filename = stub_bzipped_hrit_file - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) assert res.compute().shape == (464, 3712) def test_read_band_gzip_stream(self, stub_gzipped_hrit_file): @@ -265,7 +265,7 @@ def test_read_band_gzip_stream(self, stub_gzipped_hrit_file): fs_file = 
fsspec.open(filename, compression="gzip") self.reader.filename = FSFile(fs_file) - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) assert res.compute().shape == (464, 3712) def test_start_end_time(self): @@ -276,7 +276,7 @@ def test_start_end_time(self): assert self.reader.end_time == self.reader.observation_end_time -def fake_decompress(infile, outdir='.'): +def fake_decompress(infile, outdir="."): """Fake decompression.""" filename = os.fspath(infile)[:-3] return create_stub_hrit(filename) @@ -290,15 +290,15 @@ def test_read_band_filepath(self, stub_compressed_hrit_file): filename = stub_compressed_hrit_file with mock.patch("satpy.readers.hrit_base.decompress", side_effect=fake_decompress) as mock_decompress: - with mock.patch.object(HRITFileHandler, '_get_hd', side_effect=new_get_hd, autospec=True) as get_hd: + with mock.patch.object(HRITFileHandler, "_get_hd", side_effect=new_get_hd, autospec=True) as get_hd: self.reader = HRITFileHandler(filename, - {'platform_shortname': 'MSG3', - 'start_time': datetime(2016, 3, 3, 0, 0)}, - {'filetype': 'info'}, + {"platform_shortname": "MSG3", + "start_time": datetime(2016, 3, 3, 0, 0)}, + {"filetype": "info"}, [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()]) - res = self.reader.read_band('VIS006', None) + res = self.reader.read_band("VIS006", None) assert get_hd.call_count == 1 assert mock_decompress.call_count == 0 assert res.compute().shape == (464, 3712) diff --git a/satpy/tests/reader_tests/test_hsaf_grib.py b/satpy/tests/reader_tests/test_hsaf_grib.py index bc8a2c2c73..da0f6dd86b 100644 --- a/satpy/tests/reader_tests/test_hsaf_grib.py +++ b/satpy/tests/reader_tests/test_hsaf_grib.py @@ -23,6 +23,7 @@ from unittest import mock import numpy as np +from pytest import approx # noqa: PT013 from satpy.tests.utils import make_dataid @@ -36,8 +37,8 @@ def __init__(self, values, proj_params=None, latlons=None, **attrs): self.attrs = attrs self.values = values if proj_params is None: - proj_params = {'a': 6378140.0, 'b': 6356755.0, 'lat_0': 0.0, - 'lon_0': 0.0, 'proj': 'geos', 'h': 35785830.098} + proj_params = {"a": 6378140.0, "b": 6356755.0, "lat_0": 0.0, + "lon_0": 0.0, "proj": "geos", "h": 35785830.098} self.projparams = proj_params self._latlons = latlons @@ -66,15 +67,15 @@ def __init__(self, messages=None, proj_params=None, latlons=None): self._messages = [ FakeMessage( values=np.arange(25.).reshape((5, 5)), - name='Instantaneous rain rate', - shortName='irrate', - cfName='unknown', - units='kg m**-2 s**-1', + name="Instantaneous rain rate", + shortName="irrate", + cfName="unknown", + units="kg m**-2 s**-1", dataDate=20190603, dataTime=1645, missingValue=9999, - modelName='unknown', - centreDescription='Rome', + modelName="unknown", + centreDescription="Rome", minimum=0.0, maximum=0.01475, Nx=3712, @@ -121,51 +122,51 @@ def setUp(self): except ImportError: pygrib = None self.orig_pygrib = pygrib - sys.modules['pygrib'] = mock.MagicMock() + sys.modules["pygrib"] = mock.MagicMock() def tearDown(self): """Re-enable pygrib import.""" - sys.modules['pygrib'] = self.orig_pygrib + sys.modules["pygrib"] = self.orig_pygrib - @mock.patch('satpy.readers.hsaf_grib.pygrib.open', return_value=FakeGRIB()) + @mock.patch("satpy.readers.hsaf_grib.pygrib.open", return_value=FakeGRIB()) def test_init(self, pg): """Test the init function, ensure that the correct dates and metadata are returned.""" pg.open.return_value = FakeGRIB() correct_dt = datetime(2019, 6, 3, 16, 45, 0) from satpy.readers.hsaf_grib import 
HSAFFileHandler - fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) - self.assertEqual(fh._analysis_time, correct_dt) - self.assertEqual(fh.metadata['projparams']['lat_0'], 0.0) - self.assertEqual(fh.metadata['shortName'], 'irrate') - self.assertEqual(fh.metadata['nx'], 3712) + fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) + assert fh._analysis_time == correct_dt + assert fh.metadata["projparams"]["lat_0"] == 0.0 + assert fh.metadata["shortName"] == "irrate" + assert fh.metadata["nx"] == 3712 - @mock.patch('satpy.readers.hsaf_grib.pygrib.open', return_value=FakeGRIB()) + @mock.patch("satpy.readers.hsaf_grib.pygrib.open", return_value=FakeGRIB()) def test_get_area_def(self, pg): """Test the area definition setup, checks the size and extent.""" pg.open.return_value = FakeGRIB() from satpy.readers.hsaf_grib import HSAFFileHandler - fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) - area_def = HSAFFileHandler.get_area_def(fh, 'H03B') - self.assertEqual(area_def.width, 3712) - self.assertAlmostEqual(area_def.area_extent[0], -5569209.3026, places=3) - self.assertAlmostEqual(area_def.area_extent[3], 5587721.9097, places=3) + fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) + area_def = HSAFFileHandler.get_area_def(fh, "H03B") + assert area_def.width == 3712 + assert area_def.area_extent[0] == approx(-5569209.3026, abs=1e-3) + assert area_def.area_extent[3] == approx(5587721.9097, abs=1e-3) - @mock.patch('satpy.readers.hsaf_grib.pygrib.open', return_value=FakeGRIB()) + @mock.patch("satpy.readers.hsaf_grib.pygrib.open", return_value=FakeGRIB()) def test_get_dataset(self, pg): """Test reading the actual datasets from a grib file.""" pg.open.return_value = FakeGRIB() from satpy.readers.hsaf_grib import HSAFFileHandler # Instantaneous precipitation - fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) + fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) fh.filename = "H03B" - ds_id = make_dataid(name='H03B') + ds_id = make_dataid(name="H03B") data = fh.get_dataset(ds_id, mock.Mock()) np.testing.assert_array_equal(data.values, np.arange(25.).reshape((5, 5))) # Accumulated precipitation - fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock()) + fh = HSAFFileHandler("filename", mock.MagicMock(), mock.MagicMock()) fh.filename = "H05B" - ds_id = make_dataid(name='H05B') + ds_id = make_dataid(name="H05B") data = fh.get_dataset(ds_id, mock.Mock()) np.testing.assert_array_equal(data.values, np.arange(25.).reshape((5, 5))) diff --git a/satpy/tests/reader_tests/test_hsaf_h5.py b/satpy/tests/reader_tests/test_hsaf_h5.py index 4d574b6eb4..49658e6727 100644 --- a/satpy/tests/reader_tests/test_hsaf_h5.py +++ b/satpy/tests/reader_tests/test_hsaf_h5.py @@ -21,42 +21,42 @@ def sc_h5_file(tmp_path_factory): """Create a fake HSAF SC HDF5 file.""" filename = tmp_path_factory.mktemp("data") / "h10_20221115_day_merged.H5" h5f = h5py.File(filename, mode="w") - h5f.create_dataset('SC', SHAPE_SC, dtype=np.uint8) - h5f.create_dataset('colormap', SHAPE_SC_COLORMAP, dtype=np.uint8) + h5f.create_dataset("SC", SHAPE_SC, dtype=np.uint8) + h5f.create_dataset("colormap", SHAPE_SC_COLORMAP, dtype=np.uint8) return str(filename) def _get_scene_with_loaded_sc_datasets(filename): """Return a scene with SC and SC_pal loaded.""" loaded_scene = Scene(filenames=[filename], reader="hsaf_h5") - loaded_scene.load(['SC', 'SC_pal']) + loaded_scene.load(["SC", "SC_pal"]) return loaded_scene def 
test_hsaf_sc_dataset(sc_h5_file): """Test the H-SAF SC dataset.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) - assert loaded_scene['SC'].shape == SHAPE_SC + assert loaded_scene["SC"].shape == SHAPE_SC def test_hsaf_sc_colormap_dataset(sc_h5_file): """Test the H-SAF SC_pal dataset.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) - assert loaded_scene['SC_pal'].shape == SHAPE_SC_COLORMAP + assert loaded_scene["SC_pal"].shape == SHAPE_SC_COLORMAP def test_hsaf_sc_datetime(sc_h5_file): """Test the H-SAF reference time.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) fname = os.path.basename(sc_h5_file) - dtstr = fname.split('_')[1] + dtstr = fname.split("_")[1] obs_time = datetime.strptime(dtstr, "%Y%m%d") - assert loaded_scene['SC'].attrs['data_time'] == obs_time + assert loaded_scene["SC"].attrs["data_time"] == obs_time def test_hsaf_sc_areadef(sc_h5_file): """Test the H-SAF SC area definition.""" loaded_scene = _get_scene_with_loaded_sc_datasets(sc_h5_file) - fd_def = get_area_def('msg_seviri_fes_3km') + fd_def = get_area_def("msg_seviri_fes_3km") hsaf_def = fd_def[AREA_Y_OFFSET:AREA_Y_OFFSET+SHAPE_SC[0], AREA_X_OFFSET:AREA_X_OFFSET+SHAPE_SC[1]] - assert loaded_scene['SC'].area == hsaf_def + assert loaded_scene["SC"].area == hsaf_def diff --git a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py index b2a5d4d3e1..416d74d16e 100644 --- a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py +++ b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py @@ -49,237 +49,237 @@ def __getitem__(self, key): def _get_geo_data(self, num_rows, num_cols): geo = { - 'wvc_lon': + "wvc_lon": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ - 'fill_value': 1.7e+38, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'degree', - 'valid range': [0, 359.99], + "fill_value": 1.7e+38, + "scale_factor": 1., + "add_offset": 0., + "units": "degree", + "valid range": [0, 359.99], }, - dims=('y', 'x')), - 'wvc_lat': + dims=("y", "x")), + "wvc_lat": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ - 'fill_value': 1.7e+38, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'degree', - 'valid range': [-90.0, 90.0], + "fill_value": 1.7e+38, + "scale_factor": 1., + "add_offset": 0., + "units": "degree", + "valid range": [-90.0, 90.0], }, - dims=('y', 'x')), + dims=("y", "x")), } return geo def _get_geo_data_nsoas(self, num_rows, num_cols): geo = { - 'wvc_lon': + "wvc_lon": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ - 'fill_value': 1.7e+38, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'degree', - 'valid_range': [0, 359.99], + "fill_value": 1.7e+38, + "scale_factor": 1., + "add_offset": 0., + "units": "degree", + "valid_range": [0, 359.99], }, - dims=('y', 'x')), - 'wvc_lat': + dims=("y", "x")), + "wvc_lat": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.float32), attrs={ - 'fill_value': 1.7e+38, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'degree', - 'valid_range': [-90.0, 90.0], + "fill_value": 1.7e+38, + "scale_factor": 1., + "add_offset": 0., + "units": "degree", + "valid_range": [-90.0, 90.0], }, - dims=('y', 'x')), + dims=("y", "x")), } return geo def _get_selection_data(self, num_rows, num_cols): selection = { - 'wvc_selection': + "wvc_selection": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 
'add_offset': 0., - 'units': 'count', - 'valid range': [1, 8], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 8], }, - dims=('y', 'x')), - 'wind_speed_selection': + dims=("y", "x")), + "wind_speed_selection": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.1, - 'add_offset': 0., - 'units': 'deg', - 'valid range': [0, 3599], + "fill_value": -32767, + "scale_factor": 0.1, + "add_offset": 0., + "units": "deg", + "valid range": [0, 3599], }, - dims=('y', 'x')), - 'wind_dir_selection': + dims=("y", "x")), + "wind_dir_selection": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.01, - 'add_offset': 0., - 'units': 'm/s', - 'valid range': [0, 5000], + "fill_value": -32767, + "scale_factor": 0.01, + "add_offset": 0., + "units": "m/s", + "valid range": [0, 5000], }, - dims=('y', 'x')), - 'model_dir': + dims=("y", "x")), + "model_dir": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.01, - 'add_offset': 0., - 'units': 'm/s', - 'valid range': [0, 5000], + "fill_value": -32767, + "scale_factor": 0.01, + "add_offset": 0., + "units": "m/s", + "valid range": [0, 5000], }, - dims=('y', 'x')), - 'model_speed': + dims=("y", "x")), + "model_speed": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.1, - 'add_offset': 0., - 'units': 'deg', - 'valid range': [0, 3599], + "fill_value": -32767, + "scale_factor": 0.1, + "add_offset": 0., + "units": "deg", + "valid range": [0, 3599], }, - dims=('y', 'x')), - 'num_ambigs': + dims=("y", "x")), + "num_ambigs": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 8], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 8], }, - dims=('y', 'x')), - 'num_in_aft': + dims=("y", "x")), + "num_in_aft": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 127], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 127], }, - dims=('y', 'x')), - 'num_in_fore': + dims=("y", "x")), + "num_in_fore": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 127], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 127], }, - dims=('y', 'x')), - 'num_out_aft': + dims=("y", "x")), + "num_out_aft": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 127], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 0., + "units": "count", + "valid range": [1, 127], }, - dims=('y', 'x')), - 'num_out_fore': + dims=("y", "x")), + "num_out_fore": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.int8), attrs={ - 'fill_value': 0, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'count', - 'valid range': [1, 127], + "fill_value": 0, + "scale_factor": 1., + "add_offset": 
0., + "units": "count", + "valid range": [1, 127], }, - dims=('y', 'x')), - 'wvc_quality_flag': + dims=("y", "x")), + "wvc_quality_flag": xr.DataArray( da.ones((num_rows, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'fill_value': 2.14748e+09, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'na', - 'valid range': [1, 2.14748e+09], + "fill_value": 2.14748e+09, + "scale_factor": 1., + "add_offset": 0., + "units": "na", + "valid range": [1, 2.14748e+09], }, - dims=('y', 'x')), + dims=("y", "x")), } return selection def _get_all_ambiguities_data(self, num_rows, num_cols, num_amb): all_amb = { - 'max_likelihood_est': + "max_likelihood_est": xr.DataArray( da.ones((num_rows, num_cols, num_amb), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 1., - 'add_offset': 0., - 'units': 'na', - 'valid range': [0, 32767], + "fill_value": -32767, + "scale_factor": 1., + "add_offset": 0., + "units": "na", + "valid range": [0, 32767], }, - dims=('y', 'x', 'selection')), - 'wind_dir': + dims=("y", "x", "selection")), + "wind_dir": xr.DataArray( da.ones((num_rows, num_cols, num_amb), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.1, - 'add_offset': 0., - 'units': 'deg', - 'valid range': [0, 3599], + "fill_value": -32767, + "scale_factor": 0.1, + "add_offset": 0., + "units": "deg", + "valid range": [0, 3599], }, - dims=('y', 'x', 'selection')), - 'wind_speed': + dims=("y", "x", "selection")), + "wind_speed": xr.DataArray( da.ones((num_rows, num_cols, num_amb), chunks=1024, dtype=np.int16), attrs={ - 'fill_value': -32767, - 'scale_factor': 0.01, - 'add_offset': 0., - 'units': 'm/s', - 'valid range': [0, 5000], + "fill_value": -32767, + "scale_factor": 0.01, + "add_offset": 0., + "units": "m/s", + "valid range": [0, 5000], }, - dims=('y', 'x', 'selection')), + dims=("y", "x", "selection")), } return all_amb @@ -295,49 +295,49 @@ def _get_wvc_row_time(self, num_rows): "20200326T01:11:38.074", "20200326T01:11:41.887"] wvc_row_time = { - 'wvc_row_time': + "wvc_row_time": xr.DataArray(data, attrs={ - 'fill_value': "", + "fill_value": "", }, - dims=('y',)), + dims=("y",)), } return wvc_row_time def _get_global_attrs(self, num_rows, num_cols): return { - '/attr/Equator_Crossing_Longitude': '246.408397', - '/attr/Equator_Crossing_Time': '20200326T01:37:15.875', - '/attr/HDF_Version_Id': 'HDF5-1.8.16', - '/attr/Input_L2A_Filename': 'H2B_OPER_SCA_L2A_OR_20200326T010839_20200326T025757_07076_dps_250_20.h5', - '/attr/Instrument_ShorName': 'HSCAT-B', - '/attr/L2A_Inputdata_Version': '10', - '/attr/L2B_Actual_WVC_Rows': np.int32(num_rows), - '/attr/L2B_Algorithm_Descriptor': ('Wind retrieval processing uses the multiple solution scheme (MSS) for ' - 'wind inversion with the NSCAT-4 GMF,and a circular median filter ' - 'method (CMF) for ambiguity removal. 
The ECMWF/NCEP forescate data are ' - 'used as background winds in the CMF'), - '/attr/L2B_Data_Version': '10', - '/attr/L2B_Expected_WVC_Rows': np.int32(num_rows), - '/attr/L2B_Processing_Type': 'OPER', - '/attr/L2B_Processor_Name': 'hy2_sca_l2b_pro', - '/attr/L2B_Processor_Version': '01.00', - '/attr/Long_Name': 'HY-2B/SCAT Level 2B Ocean Wind Vectors in 25.0 km Swath Grid', - '/attr/Orbit_Inclination': np.float32(99.3401), - '/attr/Orbit_Number': '07076', - '/attr/Output_L2B_Filename': 'H2B_OPER_SCA_L2B_OR_20200326T011107_20200326T025540_07076_dps_250_20_owv.h5', - '/attr/Platform_LongName': 'Haiyang 2B Ocean Observing Satellite', - '/attr/Platform_ShortName': 'HY-2B', - '/attr/Platform_Type': 'spacecraft', - '/attr/Producer_Agency': 'Ministry of Natural Resources of the People\'s Republic of China', - '/attr/Producer_Institution': 'NSOAS', - '/attr/Production_Date_Time': '20200326T06:23:10', - '/attr/Range_Beginning_Time': '20200326T01:11:07', - '/attr/Range_Ending_Time': '20200326T02:55:40', - '/attr/Rev_Orbit_Period': '14 days', - '/attr/Short_Name': 'HY-2B SCAT-L2B-25km', - '/attr/Sigma0_Granularity': 'whole pulse', - '/attr/WVC_Size': '25000m*25000m', + "/attr/Equator_Crossing_Longitude": "246.408397", + "/attr/Equator_Crossing_Time": "20200326T01:37:15.875", + "/attr/HDF_Version_Id": "HDF5-1.8.16", + "/attr/Input_L2A_Filename": "H2B_OPER_SCA_L2A_OR_20200326T010839_20200326T025757_07076_dps_250_20.h5", + "/attr/Instrument_ShorName": "HSCAT-B", + "/attr/L2A_Inputdata_Version": "10", + "/attr/L2B_Actual_WVC_Rows": np.int32(num_rows), + "/attr/L2B_Algorithm_Descriptor": ("Wind retrieval processing uses the multiple solution scheme (MSS) for " + "wind inversion with the NSCAT-4 GMF,and a circular median filter " + "method (CMF) for ambiguity removal. 
The ECMWF/NCEP forescate data are " + "used as background winds in the CMF"), + "/attr/L2B_Data_Version": "10", + "/attr/L2B_Expected_WVC_Rows": np.int32(num_rows), + "/attr/L2B_Processing_Type": "OPER", + "/attr/L2B_Processor_Name": "hy2_sca_l2b_pro", + "/attr/L2B_Processor_Version": "01.00", + "/attr/Long_Name": "HY-2B/SCAT Level 2B Ocean Wind Vectors in 25.0 km Swath Grid", + "/attr/Orbit_Inclination": np.float32(99.3401), + "/attr/Orbit_Number": "07076", + "/attr/Output_L2B_Filename": "H2B_OPER_SCA_L2B_OR_20200326T011107_20200326T025540_07076_dps_250_20_owv.h5", + "/attr/Platform_LongName": "Haiyang 2B Ocean Observing Satellite", + "/attr/Platform_ShortName": "HY-2B", + "/attr/Platform_Type": "spacecraft", + "/attr/Producer_Agency": "Ministry of Natural Resources of the People\'s Republic of China", + "/attr/Producer_Institution": "NSOAS", + "/attr/Production_Date_Time": "20200326T06:23:10", + "/attr/Range_Beginning_Time": "20200326T01:11:07", + "/attr/Range_Ending_Time": "20200326T02:55:40", + "/attr/Rev_Orbit_Period": "14 days", + "/attr/Short_Name": "HY-2B SCAT-L2B-25km", + "/attr/Sigma0_Granularity": "whole pulse", + "/attr/WVC_Size": "25000m*25000m", } def get_test_content(self, filename, filename_info, filetype_info): @@ -349,11 +349,11 @@ def get_test_content(self, filename, filename_info, filetype_info): test_content = {} test_content.update(self._get_global_attrs(num_rows, num_cols)) data = {} - if 'OPER_SCA_L2B' in filename: - test_content.update({'/attr/L2B_Expected_WVC_Cells': np.int32(num_cols)}) + if "OPER_SCA_L2B" in filename: + test_content.update({"/attr/L2B_Expected_WVC_Cells": np.int32(num_cols)}) data = self._get_geo_data_nsoas(num_rows, num_cols) else: - test_content.update({'/attr/L2B_Number_WVC_cells': np.int32(num_cols)}) + test_content.update({"/attr/L2B_Number_WVC_cells": np.int32(num_cols)}) data = self._get_geo_data(num_rows, num_cols) test_content.update(data) @@ -377,9 +377,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.hy2_scat_l2b_h5 import HY2SCATL2BH5FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(HY2SCATL2BH5FileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(HY2SCATL2BH5FileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -391,120 +391,120 @@ def test_load_geo(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) - self.assertEqual(1, len(files)) + assert 1 == len(files) reader.create_filehandlers(files) # Make sure we have some files - self.assertTrue(reader.file_handlers) + assert reader.file_handlers - res = reader.load(['wvc_lon', 'wvc_lat']) - self.assertEqual(2, len(res)) + res = reader.load(["wvc_lon", "wvc_lat"]) + assert 2 == len(res) def test_load_geo_nsoas(self): """Test loading data from nsoas file.""" from satpy.readers import load_reader filenames = [ - 
'H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5', ] + "H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) - self.assertEqual(1, len(files)) + assert 1 == len(files) reader.create_filehandlers(files) # Make sure we have some files - self.assertTrue(reader.file_handlers) + assert reader.file_handlers - res = reader.load(['wvc_lon', 'wvc_lat']) - self.assertEqual(2, len(res)) + res = reader.load(["wvc_lon", "wvc_lat"]) + assert 2 == len(res) def test_load_data_selection(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) - self.assertEqual(1, len(files)) + assert 1 == len(files) reader.create_filehandlers(files) # Make sure we have some files - self.assertTrue(reader.file_handlers) - res = reader.load(['wind_speed_selection', - 'wind_dir_selection', - 'wvc_selection']) - self.assertEqual(3, len(res)) + assert reader.file_handlers + res = reader.load(["wind_speed_selection", + "wind_dir_selection", + "wvc_selection"]) + assert 3 == len(res) def test_load_data_all_ambiguities(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) - self.assertEqual(1, len(files)) + assert 1 == len(files) reader.create_filehandlers(files) # Make sure we have some files - self.assertTrue(reader.file_handlers) - res = reader.load(['wind_speed', - 'wind_dir', - 'max_likelihood_est', - 'model_dir', - 'model_speed', - 'num_ambigs', - 'num_in_aft', - 'num_in_fore', - 'num_out_aft', - 'num_out_fore', - 'wvc_quality_flag']) - self.assertEqual(11, len(res)) + assert reader.file_handlers + res = reader.load(["wind_speed", + "wind_dir", + "max_likelihood_est", + "model_dir", + "model_speed", + "num_ambigs", + "num_in_aft", + "num_in_fore", + "num_out_aft", + "num_out_fore", + "wvc_quality_flag"]) + assert 11 == len(res) def test_load_data_row_times(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) - self.assertEqual(1, len(files)) + assert 1 == len(files) reader.create_filehandlers(files) # Make sure we have some files - self.assertTrue(reader.file_handlers) - res = reader.load(['wvc_row_time']) - self.assertEqual(1, len(res)) + assert reader.file_handlers + res = reader.load(["wvc_row_time"]) + assert 1 == len(res) def test_reading_attrs(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader 
= load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(files) # Make sure we have some files - res = reader.load(['wvc_lon']) - self.assertEqual(res['wvc_lon'].attrs['L2B_Number_WVC_cells'], 10) + res = reader.load(["wvc_lon"]) + assert res["wvc_lon"].attrs["L2B_Number_WVC_cells"] == 10 with self.assertRaises(KeyError): - self.assertEqual(res['wvc_lon'].attrs['L2B_Expected_WVC_Cells'], 10) + assert res["wvc_lon"].attrs["L2B_Expected_WVC_Cells"] == 10 def test_reading_attrs_nsoas(self): """Test loading data.""" from satpy.readers import load_reader filenames = [ - 'H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5', ] + "H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(files) # Make sure we have some files - res = reader.load(['wvc_lon']) + res = reader.load(["wvc_lon"]) with self.assertRaises(KeyError): - self.assertEqual(res['wvc_lon'].attrs['L2B_Number_WVC_cells'], 10) - self.assertEqual(res['wvc_lon'].attrs['L2B_Expected_WVC_Cells'], 10) + assert res["wvc_lon"].attrs["L2B_Number_WVC_cells"] == 10 + assert res["wvc_lon"].attrs["L2B_Expected_WVC_Cells"] == 10 def test_properties(self): """Test platform_name.""" @@ -512,13 +512,13 @@ def test_properties(self): from satpy.readers import load_reader filenames = [ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ] + "W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5", ] reader = load_reader(self.reader_configs) files = reader.select_files_from_pathnames(filenames) reader.create_filehandlers(files) # Make sure we have some files - res = reader.load(['wvc_lon']) - self.assertEqual(res['wvc_lon'].platform_name, 'HY-2B') - self.assertEqual(res['wvc_lon'].start_time, datetime(2020, 3, 26, 1, 11, 7)) - self.assertEqual(res['wvc_lon'].end_time, datetime(2020, 3, 26, 2, 55, 40)) + res = reader.load(["wvc_lon"]) + assert res["wvc_lon"].platform_name == "HY-2B" + assert res["wvc_lon"].start_time == datetime(2020, 3, 26, 1, 11, 7) + assert res["wvc_lon"].end_time == datetime(2020, 3, 26, 2, 55, 40) diff --git a/satpy/tests/reader_tests/test_iasi_l2.py b/satpy/tests/reader_tests/test_iasi_l2.py index 9dbfa7eef0..39382314d3 100644 --- a/satpy/tests/reader_tests/test_iasi_l2.py +++ b/satpy/tests/reader_tests/test_iasi_l2.py @@ -32,85 +32,85 @@ # Structure for the test data, to be written to HDF5 file TEST_DATA = { # Not implemented in the reader - 'Amsu': { - 'FLG_AMSUBAD': {'data': np.zeros((NUM_SCANLINES, 30), dtype=np.uint8), - 'attrs': {}} + "Amsu": { + "FLG_AMSUBAD": {"data": np.zeros((NUM_SCANLINES, 30), dtype=np.uint8), + "attrs": {}} }, # Not implemented in the reader - 'INFO': { - 'OmC': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'long_name': "Cloud signal. Predicted average window channel 'Obs minus Calc", - 'units': 'K'}}, - 'mdist': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}} + "INFO": { + "OmC": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"long_name": "Cloud signal. 
Predicted average window channel 'Obs minus Calc", + "units": "K"}}, + "mdist": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}} }, - 'L1C': { - 'Latitude': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees_north'}}, - 'Longitude': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees_north'}}, - 'SatAzimuth': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees'}}, - 'SatZenith': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees'}}, - 'SensingTime_day': {'data': np.array([6472], dtype=np.uint16), - 'attrs': {}}, - 'SensingTime_msec': {'data': np.array([37337532], dtype=np.uint32), - 'attrs': {}}, - 'SunAzimuth': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees'}}, - 'SunZenith': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'degrees'}}, + "L1C": { + "Latitude": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees_north"}}, + "Longitude": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees_north"}}, + "SatAzimuth": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees"}}, + "SatZenith": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees"}}, + "SensingTime_day": {"data": np.array([6472], dtype=np.uint16), + "attrs": {}}, + "SensingTime_msec": {"data": np.array([37337532], dtype=np.uint32), + "attrs": {}}, + "SunAzimuth": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees"}}, + "SunZenith": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "degrees"}}, }, # Not implemented in the reader - 'Maps': { - 'Height': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'm'}}, - 'HeightStd': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'units': 'm'}}, + "Maps": { + "Height": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "m"}}, + "HeightStd": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"units": "m"}}, }, # Not implemented in the reader - 'Mhs': { - 'FLG_MHSBAD': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.uint8), - 'attrs': {}} + "Mhs": { + "FLG_MHSBAD": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.uint8), + "attrs": {}} }, - 'PWLR': { - 'E': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, 10), dtype=np.float32), - 'attrs': {'emissivity_wavenumbers': np.array([699.3, 826.4, + "PWLR": { + "E": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, 10), dtype=np.float32), + "attrs": {"emissivity_wavenumbers": np.array([699.3, 826.4, 925.9, 1075.2, 1204.8, 1315.7, 1724.1, 2000.0, 2325.5, 2702.7], dtype=np.float32)}}, - 'O': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), - 'attrs': {'long_name': 'Ozone mixing ratio vertical profile', - 'units': 'kg/kg'}}, - 'OC': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'P': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), - 'attrs': {'long_name': 'Atmospheric pressures at which the vertical profiles are given. 
' - 'Last value is the surface pressure', - 'units': 'hpa'}}, - 'QE': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'QO': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'QP': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'QT': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'QTs': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'QW': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {}}, - 'T': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), - 'attrs': {'long_name': 'Temperature vertical profile', 'units': 'K'}}, - 'Ts': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'long_name': 'Surface skin temperature', 'units': 'K'}}, - 'W': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), - 'attrs': {'long_name': 'Water vapour mixing ratio vertical profile', 'units': 'kg/kg'}}, - 'WC': {'data': np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), - 'attrs': {'long_name': 'Water vapour total columnar amount', 'units': 'mm'}}, + "O": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), + "attrs": {"long_name": "Ozone mixing ratio vertical profile", + "units": "kg/kg"}}, + "OC": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "P": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), + "attrs": {"long_name": "Atmospheric pressures at which the vertical profiles are given. " + "Last value is the surface pressure", + "units": "hpa"}}, + "QE": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "QO": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "QP": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "QT": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "QTs": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "QW": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {}}, + "T": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), + "attrs": {"long_name": "Temperature vertical profile", "units": "K"}}, + "Ts": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"long_name": "Surface skin temperature", "units": "K"}}, + "W": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH, NUM_LEVELS), dtype=np.float32), + "attrs": {"long_name": "Water vapour mixing ratio vertical profile", "units": "kg/kg"}}, + "WC": {"data": np.zeros((NUM_SCANLINES, SCAN_WIDTH), dtype=np.float32), + "attrs": {"long_name": "Water vapour total columnar amount", "units": "mm"}}, } } @@ -118,17 +118,17 @@ def save_test_data(path): """Save the test to the indicated directory.""" import h5py - with h5py.File(os.path.join(path, FNAME), 'w') as fid: + with h5py.File(os.path.join(path, FNAME), "w") as fid: # Create groups for grp in TEST_DATA: fid.create_group(grp) # Write datasets for dset in TEST_DATA[grp]: - fid[grp][dset] = TEST_DATA[grp][dset]['data'] + fid[grp][dset] = TEST_DATA[grp][dset]["data"] # Write dataset attributes - for attr in TEST_DATA[grp][dset]['attrs']: + for attr in TEST_DATA[grp][dset]["attrs"]: fid[grp][dset].attrs[attr] = \ - TEST_DATA[grp][dset]['attrs'][attr] + TEST_DATA[grp][dset]["attrs"][attr] 
class TestIasiL2(unittest.TestCase): @@ -144,16 +144,16 @@ def setUp(self): self.base_dir = tempfile.mkdtemp() save_test_data(self.base_dir) self.fname = os.path.join(self.base_dir, FNAME) - self.fname_info = {'start_time': dt.datetime(2017, 9, 20, 10, 22, 17), - 'end_time': dt.datetime(2017, 9, 20, 10, 29, 12), - 'processing_time': dt.datetime(2017, 9, 20, 10, 35, 59), - 'processing_location': 'kan', - 'long_platform_id': 'metopb', - 'instrument': 'iasi', - 'platform_id': 'M01'} - self.ftype_info = {'file_reader': IASIL2HDF5, - 'file_patterns': ['{fname}.hdf'], - 'file_type': 'iasi_l2_hdf5'} + self.fname_info = {"start_time": dt.datetime(2017, 9, 20, 10, 22, 17), + "end_time": dt.datetime(2017, 9, 20, 10, 29, 12), + "processing_time": dt.datetime(2017, 9, 20, 10, 35, 59), + "processing_location": "kan", + "long_platform_id": "metopb", + "instrument": "iasi", + "platform_id": "M01"} + self.ftype_info = {"file_reader": IASIL2HDF5, + "file_patterns": ["{fname}.hdf"], + "file_type": "iasi_l2_hdf5"} self.reader = IASIL2HDF5(self.fname, self.fname_info, self.ftype_info) def tearDown(self): @@ -168,101 +168,101 @@ def test_scene(self): """Test scene creation.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) - scn = Scene(reader='iasi_l2', filenames=[fname]) + scn = Scene(reader="iasi_l2", filenames=[fname]) assert scn.start_time is not None assert scn.end_time is not None assert scn.sensor_names - assert 'iasi' in scn.sensor_names + assert "iasi" in scn.sensor_names def test_scene_load_available_datasets(self): """Test that all datasets are available.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) - scn = Scene(reader='iasi_l2', filenames=[fname]) + scn = Scene(reader="iasi_l2", filenames=[fname]) scn.load(scn.available_dataset_names()) def test_scene_load_pressure(self): """Test loading pressure data.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) - scn = Scene(reader='iasi_l2', filenames=[fname]) - scn.load(['pressure']) - pres = scn['pressure'].compute() + scn = Scene(reader="iasi_l2", filenames=[fname]) + scn.load(["pressure"]) + pres = scn["pressure"].compute() self.check_pressure(pres, scn.attrs) def test_scene_load_emissivity(self): """Test loading emissivity data.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) - scn = Scene(reader='iasi_l2', filenames=[fname]) - scn.load(['emissivity']) - emis = scn['emissivity'].compute() + scn = Scene(reader="iasi_l2", filenames=[fname]) + scn.load(["emissivity"]) + emis = scn["emissivity"].compute() self.check_emissivity(emis) def test_scene_load_sensing_times(self): """Test loading sensing times.""" from satpy import Scene fname = os.path.join(self.base_dir, FNAME) - scn = Scene(reader='iasi_l2', filenames=[fname]) - scn.load(['sensing_time']) - times = scn['sensing_time'].compute() + scn = Scene(reader="iasi_l2", filenames=[fname]) + scn.load(["sensing_time"]) + times = scn["sensing_time"].compute() self.check_sensing_times(times) def test_init(self): """Test reader initialization.""" - self.assertEqual(self.reader.filename, self.fname) - self.assertEqual(self.reader.finfo, self.fname_info) - self.assertTrue(self.reader.lons is None) - self.assertTrue(self.reader.lats is None) - self.assertEqual(self.reader.mda['platform_name'], 'Metop-B') - self.assertEqual(self.reader.mda['sensor'], 'iasi') + assert self.reader.filename == self.fname + assert self.reader.finfo == self.fname_info + assert self.reader.lons is None + assert self.reader.lats is None + assert 
self.reader.mda["platform_name"] == "Metop-B" + assert self.reader.mda["sensor"] == "iasi" def test_time_properties(self): """Test time properties.""" import datetime as dt - self.assertTrue(isinstance(self.reader.start_time, dt.datetime)) - self.assertTrue(isinstance(self.reader.end_time, dt.datetime)) + assert isinstance(self.reader.start_time, dt.datetime) + assert isinstance(self.reader.end_time, dt.datetime) def test_get_dataset(self): """Test get_dataset() for different datasets.""" from satpy.tests.utils import make_dataid - info = {'eggs': 'spam'} - key = make_dataid(name='pressure') + info = {"eggs": "spam"} + key = make_dataid(name="pressure") data = self.reader.get_dataset(key, info).compute() self.check_pressure(data) - self.assertTrue('eggs' in data.attrs) - self.assertEqual(data.attrs['eggs'], 'spam') - key = make_dataid(name='emissivity') + assert "eggs" in data.attrs + assert data.attrs["eggs"] == "spam" + key = make_dataid(name="emissivity") data = self.reader.get_dataset(key, info).compute() self.check_emissivity(data) - key = make_dataid(name='sensing_time') + key = make_dataid(name="sensing_time") data = self.reader.get_dataset(key, info).compute() - self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH)) + assert data.shape == (NUM_SCANLINES, SCAN_WIDTH) def check_pressure(self, pres, attrs=None): """Test reading pressure dataset. Helper function. """ - self.assertTrue(np.all(pres == 0.0)) - self.assertEqual(pres.x.size, SCAN_WIDTH) - self.assertEqual(pres.y.size, NUM_SCANLINES) - self.assertEqual(pres.level.size, NUM_LEVELS) + assert np.all(pres == 0.0) + assert pres.x.size == SCAN_WIDTH + assert pres.y.size == NUM_SCANLINES + assert pres.level.size == NUM_LEVELS if attrs: - self.assertEqual(pres.attrs['start_time'], attrs['start_time']) - self.assertEqual(pres.attrs['end_time'], attrs['end_time']) - self.assertTrue('long_name' in pres.attrs) - self.assertTrue('units' in pres.attrs) + assert pres.attrs["start_time"] == attrs["start_time"] + assert pres.attrs["end_time"] == attrs["end_time"] + assert "long_name" in pres.attrs + assert "units" in pres.attrs def check_emissivity(self, emis): """Test reading emissivity dataset. Helper function. """ - self.assertTrue(np.all(emis == 0.0)) - self.assertEqual(emis.x.size, SCAN_WIDTH) - self.assertEqual(emis.y.size, NUM_SCANLINES) - self.assertTrue('emissivity_wavenumbers' in emis.attrs) + assert np.all(emis == 0.0) + assert emis.x.size == SCAN_WIDTH + assert emis.y.size == NUM_SCANLINES + assert "emissivity_wavenumbers" in emis.attrs def check_sensing_times(self, times): """Test reading sensing times. 
@@ -272,8 +272,8 @@ def check_sensing_times(self, times): # Times should be equal in blocks of four, but not beyond, so # there should be SCAN_WIDTH/4 different values for i in range(int(SCAN_WIDTH / 4)): - self.assertEqual(np.unique(times[0, i*4:i*4+4]).size, 1) - self.assertEqual(np.unique(times[0, :]).size, SCAN_WIDTH / 4) + assert np.unique(times[0, i * 4:i * 4 + 4]).size == 1 + assert np.unique(times[0, :]).size == SCAN_WIDTH / 4 def test_read_dataset(self): """Test read_dataset() function.""" @@ -281,17 +281,17 @@ def test_read_dataset(self): from satpy.readers.iasi_l2 import read_dataset from satpy.tests.utils import make_dataid - with h5py.File(self.fname, 'r') as fid: - key = make_dataid(name='pressure') + with h5py.File(self.fname, "r") as fid: + key = make_dataid(name="pressure") data = read_dataset(fid, key).compute() self.check_pressure(data) - key = make_dataid(name='emissivity') + key = make_dataid(name="emissivity") data = read_dataset(fid, key).compute() self.check_emissivity(data) # This dataset doesn't have any attributes - key = make_dataid(name='ozone_total_column') + key = make_dataid(name="ozone_total_column") data = read_dataset(fid, key).compute() - self.assertEqual(len(data.attrs), 0) + assert len(data.attrs) == 0 def test_read_geo(self): """Test read_geo() function.""" @@ -299,24 +299,24 @@ def test_read_geo(self): from satpy.readers.iasi_l2 import read_geo from satpy.tests.utils import make_dataid - with h5py.File(self.fname, 'r') as fid: - key = make_dataid(name='sensing_time') + with h5py.File(self.fname, "r") as fid: + key = make_dataid(name="sensing_time") data = read_geo(fid, key).compute() - self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH)) - key = make_dataid(name='latitude') + assert data.shape == (NUM_SCANLINES, SCAN_WIDTH) + key = make_dataid(name="latitude") data = read_geo(fid, key).compute() - self.assertEqual(data.shape, (NUM_SCANLINES, SCAN_WIDTH)) + assert data.shape == (NUM_SCANLINES, SCAN_WIDTH) def test_form_datetimes(self): """Test _form_datetimes() function.""" from satpy.readers.iasi_l2 import _form_datetimes - days = TEST_DATA['L1C']['SensingTime_day']['data'] - msecs = TEST_DATA['L1C']['SensingTime_msec']['data'] + days = TEST_DATA["L1C"]["SensingTime_day"]["data"] + msecs = TEST_DATA["L1C"]["SensingTime_msec"]["data"] times = _form_datetimes(days, msecs) self.check_sensing_times(times) -@pytest.fixture +@pytest.fixture() def fake_iasi_l2_cdr_nc_dataset(): """Create minimally fake IASI L2 CDR NC dataset.""" shp = (3, 4, 5) @@ -371,7 +371,7 @@ def fake_iasi_l2_cdr_nc_dataset(): "pressure_levels": pres}) -@pytest.fixture +@pytest.fixture() def fake_iasi_l2_cdr_nc_file(fake_iasi_l2_cdr_nc_dataset, tmp_path): """Write a NetCDF file with minimal fake IASI L2 CDR NC data.""" fn = ("W_XX-EUMETSAT-Darmstadt,HYPERSPECT+SOUNDING,METOPA+PW3+" diff --git a/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py b/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py index 85df6b64ed..b75ac67dee 100644 --- a/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py +++ b/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py @@ -29,46 +29,46 @@ # bufr file distributed over EUMETCAST msg = { - 'unpack': 1, - 'inputDelayedDescriptorReplicationFactor': 5, - 'edition': 4, - 'masterTableNumber': 0, - 'bufrHeaderCentre': 254, - 'bufrHeaderSubCentre': 0, - 'updateSequenceNumber': 0, - 'dataCategory': 3, - 'internationalDataSubCategory': 255, - 'dataSubCategory': 230, - 'masterTablesVersionNumber': 31, - 'localTablesVersionNumber': 0, - 'typicalYear': 2020, - 
'typicalMonth': 2, - 'typicalDay': 4, - 'typicalHour': 8, - 'typicalMinute': 59, - 'typicalSecond': 0, - 'numberOfSubsets': 120, - 'observedData': 1, - 'compressedData': 1, - 'unexpandedDescriptors': np.array([ + "unpack": 1, + "inputDelayedDescriptorReplicationFactor": 5, + "edition": 4, + "masterTableNumber": 0, + "bufrHeaderCentre": 254, + "bufrHeaderSubCentre": 0, + "updateSequenceNumber": 0, + "dataCategory": 3, + "internationalDataSubCategory": 255, + "dataSubCategory": 230, + "masterTablesVersionNumber": 31, + "localTablesVersionNumber": 0, + "typicalYear": 2020, + "typicalMonth": 2, + "typicalDay": 4, + "typicalHour": 8, + "typicalMinute": 59, + "typicalSecond": 0, + "numberOfSubsets": 120, + "observedData": 1, + "compressedData": 1, + "unexpandedDescriptors": np.array([ 1007, 1031, 25060, 2019, 2020, 4001, 4002, 4003, 4004, 4005, 4006, 5040, 201133, 5041, 201000, 5001, 6001, 5043, 7024, 5021, 7025, 5022, 7007, 40068, 7002, 15045, 12080, 102000, 31001, 7007, 15045], dtype=int), - '#1#satelliteIdentifier': 4, - '#1#centre': 254, - '#1#softwareIdentification': 605, - '#1#satelliteInstruments': 221, - '#1#satelliteClassification': 61, - '#1#year': 2020, - '#1#month': 2, - '#1#day': 4, - '#1#hour': 9, - '#1#minute': 1, - '#1#second': 11, - '#1#orbitNumber': 68984, - '#1#scanLineNumber': 447, - '#1#latitude': np.array([ + "#1#satelliteIdentifier": 4, + "#1#centre": 254, + "#1#softwareIdentification": 605, + "#1#satelliteInstruments": 221, + "#1#satelliteClassification": 61, + "#1#year": 2020, + "#1#month": 2, + "#1#day": 4, + "#1#hour": 9, + "#1#minute": 1, + "#1#second": 11, + "#1#orbitNumber": 68984, + "#1#scanLineNumber": 447, + "#1#latitude": np.array([ -33.4055, -33.6659, -33.738, -33.4648, -33.263, -33.5027, -33.5673, -33.3172, -33.1332, -33.3564, -33.4151, -33.1832, -33.0132, -33.2232, -33.2771, -33.0596, -32.903, -33.1021, -33.1522, -32.9466, -32.7982, -32.9884, -33.0354, -32.8395, @@ -85,7 +85,7 @@ -30.4071, -30.6153, -30.7036, -30.4967, -30.146, -30.3672, -30.4712, -30.2521, -29.8276, -30.0649, -30.1911, -29.9569, -29.4268, -29.6844, -29.8436, -29.5903]), - '#1#longitude': np.array([ + "#1#longitude": np.array([ 2.53790e+00, 2.49440e+00, 3.08690e+00, 3.12690e+00, 1.15600e+00, 1.11230e+00, 1.59640e+00, 1.63750e+00, -3.70000e-03, -4.73000e-02, 3.61900e-01, 4.03500e-01, -1.00010e+00, -1.04340e+00, -6.88300e-01, @@ -111,7 +111,7 @@ -1.59045e+01, -1.58264e+01, -1.73549e+01, -1.74460e+01, -1.69944e+01, -1.69085e+01, -1.87277e+01, -1.88302e+01, -1.82832e+01, -1.81873e+01]), - '#1#fieldOfViewNumber': np.array([ + "#1#fieldOfViewNumber": np.array([ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, @@ -120,7 +120,7 @@ 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120]), - '#1#satelliteZenithAngle': np.array([ + "#1#satelliteZenithAngle": np.array([ 56.64, 56.64, 58.38, 58.37, 52.15, 52.15, 53.8, 53.79, 47.84, 47.84, 49.42, 49.42, 43.67, 43.67, 45.21, 45.2, 39.59, 39.59, 41.1, 41.09, 35.59, 35.59, 37.08, 37.07, 31.65, 31.65, 33.11, 33.1, 27.75, 27.75, 29.2, 29.19, 23.89, 23.89, 25.33, 25.32, @@ -133,7 +133,7 @@ 49.52, 49.53, 47.94, 47.94, 53.89, 53.9, 52.25, 52.25, 58.48, 58.48, 56.74, 56.75]), - '#1#bearingOrAzimuth': np.array([ + "#1#bearingOrAzimuth": np.array([ 276.93, 278.61, 278.27, 276.61, 277.64, 279.42, 279.14, 277.38, 278.22, 280.11, 
279.88, 278.01, 278.69, 280.72, 280.51, 278.51, 279.09, 281.3, 281.11, 278.94, 279.41, 281.83, 281.64, 279.28, 279.68, 282.36, 282.18, 279.58, 279.88, 282.9, @@ -147,7 +147,7 @@ 107.74, 105.67, 105.47, 107.64, 108.11, 106.2, 105.99, 107.98, 108.54, 106.76, 106.53, 108.38, 109.06, 107.39, 107.14, 108.87, 109.7, 108.13, 107.83, 109.46]), - '#1#solarZenithAngle': np.array([ + "#1#solarZenithAngle": np.array([ 44.36, 44.44, 43.98, 43.89, 45.47, 45.54, 45.16, 45.08, 46.4, 46.47, 46.14, 46.07, 47.21, 47.27, 46.99, 46.92, 47.92, 47.98, 47.73, 47.67, 48.56, 48.62, 48.39, 48.33, 49.15, 49.21, 49., 48.94, 49.7, 49.75, 49.55, 49.5, 50.21, 50.26, 50.07, 50.02, @@ -160,7 +160,7 @@ 59.98, 60.04, 59.70, 59.64, 60.98, 61.05, 60.65, 60.59, 62.20, 62.27, 61.78, 61.72]), - '#1#solarAzimuth': np.array([ + "#1#solarAzimuth": np.array([ 78.89, 78.66, 78.16, 78.41, 80.00, 79.80, 79.40, 79.62, 80.92, 80.74, 80.40, 80.6, 81.69, 81.53, 81.24, 81.42, 82.36, 82.21, 81.96, 82.12, 82.96, 82.82, 82.60, 82.74, 83.49, 83.36, 83.16, 83.3, 83.98, 83.86, 83.68, 83.80, 84.43, 84.32, 84.15, 84.27, @@ -172,11 +172,11 @@ 90.58, 90.49, 90.31, 90.4, 91.09, 91., 90.81, 90.89, 91.66, 91.57, 91.35, 91.44, 92.29, 92.20, 91.95, 92.04, 93.02, 92.93, 92.64, 92.73, 93.87, 93.79, 93.45, 93.54]), - '#1#height': 83270, - '#1#generalRetrievalQualityFlagForSo2': 9, - '#2#height': -1e+100, - '#1#sulphurDioxide': -1e+100, - '#1#brightnessTemperatureRealPart': np.array([ + "#1#height": 83270, + "#1#generalRetrievalQualityFlagForSo2": 9, + "#2#height": -1e+100, + "#1#sulphurDioxide": -1e+100, + "#1#brightnessTemperatureRealPart": np.array([ 0.11, 0.11, -0.07, 0.08, 0.13, 0.15, 0.10, 0.06, -0.02, -0.03, 0.08, 0.17, -0.05, 0.12, 0.08, -0.06, 0.15, 0.08, -0.04, -0.01, 0.06, 0.17, -0.01, 0.15, 0.18, 0.05, 0.11, -0.03, 0.09, 0.02, 0.04, 0.10, 0.00, 0.00, 0.01, 0.18, @@ -188,8 +188,8 @@ 0.08, -0.05, -0.08, 0.41, -0.19, -0.22, -0.03, 0.11, -0.26, -0.33, -0.08, 0.03, -0.05, 0.02, 0.17, -0.10, 0.01, 0.01, 0.05, 0.01, 0.15, -0.06, -0.14, 0.38]), - '#3#height': 7000, - '#2#sulphurDioxide': np.array([ + "#3#height": 7000, + "#2#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, @@ -209,8 +209,8 @@ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), - '#4#height': 10000, - '#3#sulphurDioxide': np.array([ + "#4#height": 10000, + "#3#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, @@ -227,8 +227,8 @@ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), - '#5#height': 13000, - '#4#sulphurDioxide': np.array([ + "#5#height": 13000, + "#4#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, @@ -245,8 +245,8 @@ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, 
-1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), - '#6#height': 16000, - '#5#sulphurDioxide': np.array([ + "#6#height": 16000, + "#5#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, @@ -263,8 +263,8 @@ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100]), - '#7#height': 25000, - '#6#sulphurDioxide': np.array([ + "#7#height": 25000, + "#6#sulphurDioxide": np.array([ -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, -1.0e+100, @@ -283,23 +283,23 @@ } # the notional filename that would contain the above test message data -FILENAME = 'W_XX-EUMETSAT-Darmstadt,SOUNDING+SATELLITE,METOPA+IASI_C_EUMC_20200204091455_68977_eps_o_so2_l2.bin' +FILENAME = "W_XX-EUMETSAT-Darmstadt,SOUNDING+SATELLITE,METOPA+IASI_C_EUMC_20200204091455_68977_eps_o_so2_l2.bin" # the information that would be extracted from the above filename according to the pattern in the .yaml FILENAME_INFO = { - 'reception_location': 'EUMETSAT-Darmstadt', - 'platform': 'METOPA', - 'instrument': 'IASI', - 'start_time': '20200204091455', - 'perigee': '68977', - 'species': 'so2', - 'level': 'l2' + "reception_location": "EUMETSAT-Darmstadt", + "platform": "METOPA", + "instrument": "IASI", + "start_time": "20200204091455", + "perigee": "68977", + "species": "so2", + "level": "l2" } # file type info for the above file that is defined in the .yaml FILETYPE_INFO = { - 'file_type': 'iasi_l2_so2_bufr', - 'file_reader': 'IASIL2SO2BUFR' + "file_type": "iasi_l2_so2_bufr", + "file_reader": "IASIL2SO2BUFR" } # number of cross track samples in one IASI scan @@ -314,7 +314,7 @@ def save_test_data(path): for m in [msg]: - buf = ec.codes_bufr_new_from_samples('BUFR4_local_satellite') + buf = ec.codes_bufr_new_from_samples("BUFR4_local_satellite") for key in m: val = m[key] @@ -325,7 +325,7 @@ def save_test_data(path): else: ec.codes_set_array(buf, key, val) - ec.codes_set(buf, 'pack', 1) + ec.codes_set(buf, "pack", 1) ec.codes_write(buf, f) ec.codes_release(buf) @@ -354,36 +354,36 @@ def tearDown(self): except OSError: pass - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene(self): """Test scene creation.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname]) + scn = Scene(reader="iasi_l2_so2_bufr", filenames=[fname]) assert scn.start_time is not None assert scn.end_time is not None assert scn.sensor_names - assert 'iasi' in scn.sensor_names + assert "iasi" in scn.sensor_names - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_load_available_datasets(self): """Test that all datasets are available.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname]) + scn = 
Scene(reader="iasi_l2_so2_bufr", filenames=[fname]) scn.load(scn.available_dataset_names()) - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def test_scene_dataset_values(self): """Test loading data.""" from satpy import Scene fname = os.path.join(self.base_dir, FILENAME) - scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname]) + scn = Scene(reader="iasi_l2_so2_bufr", filenames=[fname]) for name in scn.available_dataset_names(): @@ -391,13 +391,13 @@ def test_scene_dataset_values(self): loaded_values = scn[name].values - fill_value = scn[name].attrs['fill_value'] + fill_value = scn[name].attrs["fill_value"] # replace nans in data loaded from file with the fill value defined in the .yaml # to make them comparable loaded_values_nan_filled = np.nan_to_num(loaded_values, nan=fill_value) - key = scn[name].attrs['key'] + key = scn[name].attrs["key"] original_values = msg[key] @@ -405,9 +405,4 @@ def test_scene_dataset_values(self): # (note: if all subtests pass, they will count as one test) with self.subTest(msg="Test failed for dataset: "+name): - self.assertTrue(np.allclose(original_values, loaded_values_nan_filled)) - - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") - def test_init(self): - """Test reader initialization.""" - self.assertTrue(True) + assert np.allclose(original_values, loaded_values_nan_filled) diff --git a/satpy/tests/reader_tests/test_ici_l1b_nc.py b/satpy/tests/reader_tests/test_ici_l1b_nc.py index d98da24f31..498ca88705 100644 --- a/satpy/tests/reader_tests/test_ici_l1b_nc.py +++ b/satpy/tests/reader_tests/test_ici_l1b_nc.py @@ -43,51 +43,51 @@ N_183 = 3 -@pytest.fixture +@pytest.fixture() def reader(fake_file): """Return reader of ici level1b data.""" return IciL1bNCFileHandler( filename=fake_file, filename_info={ - 'sensing_start_time': ( - datetime.fromisoformat('2000-01-01T01:00:00') + "sensing_start_time": ( + datetime.fromisoformat("2000-01-01T01:00:00") ), - 'sensing_end_time': ( - datetime.fromisoformat('2000-01-01T02:00:00') + "sensing_end_time": ( + datetime.fromisoformat("2000-01-01T02:00:00") ), - 'creation_time': ( - datetime.fromisoformat('2000-01-01T03:00:00') + "creation_time": ( + datetime.fromisoformat("2000-01-01T03:00:00") ), }, filetype_info={ - 'longitude': 'data/navigation_data/longitude', - 'latitude': 'data/navigation_data/latitude', - 'solar_azimuth': 'data/navigation_data/ici_solar_azimuth_angle', - 'solar_zenith': 'data/navigation_data/ici_solar_zenith_angle', + "longitude": "data/navigation_data/longitude", + "latitude": "data/navigation_data/latitude", + "solar_azimuth": "data/navigation_data/ici_solar_azimuth_angle", + "solar_zenith": "data/navigation_data/ici_solar_zenith_angle", } ) -@pytest.fixture +@pytest.fixture() def fake_file(tmp_path): """Return file path to level1b file.""" - file_path = tmp_path / 'test_file_ici_l1b_nc.nc' + file_path = tmp_path / "test_file_ici_l1b_nc.nc" writer = IciL1bFakeFileWriter(file_path) writer.write() - yield file_path + return file_path -@pytest.fixture +@pytest.fixture() def dataset_info(): """Return dataset info.""" return { - 'name': '1', - 'file_type': 'nc_ici_l1b_rad', - 'file_key': 'data/measurement_data/ici_radiance_183', - 'coordinates': ['lat_pixels_horn_1', 'lon_pixels_horn_1'], - 'n_183': 0, - 'chan_index': 0, - 'calibration': 'brightness_temperature', + "name": "1", + "file_type": "nc_ici_l1b_rad", + "file_key": 
"data/measurement_data/ici_radiance_183", + "coordinates": ["lat_pixels_horn_1", "lon_pixels_horn_1"], + "n_183": 0, + "chan_index": 0, + "calibration": "brightness_temperature", } @@ -100,10 +100,10 @@ def __init__(self, file_path): def write(self): """Write fake data to file.""" - with Dataset(self.file_path, 'w') as dataset: + with Dataset(self.file_path, "w") as dataset: self._write_attributes(dataset) self._write_quality_group(dataset) - data_group = dataset.createGroup('data') + data_group = dataset.createGroup("data") self._write_measurement_data_group(data_group) self._write_navigation_data_group(data_group) @@ -118,59 +118,59 @@ def _write_attributes(dataset): @staticmethod def _write_quality_group(dataset): """Write the quality group.""" - group = dataset.createGroup('quality') + group = dataset.createGroup("quality") group.overall_quality_flag = 0 duration_of_product = group.createVariable( - 'duration_of_product', "f4" + "duration_of_product", "f4" ) duration_of_product[:] = 1000. @staticmethod def _write_navigation_data_group(dataset): """Write the navigation data group.""" - group = dataset.createGroup('navigation_data') - group.createDimension('n_scan', N_SCAN) - group.createDimension('n_samples', N_SAMPLES) - group.createDimension('n_subs', N_SUBS) - group.createDimension('n_horns', N_HORNS) - subs = group.createVariable('n_subs', "i4", dimensions=('n_subs',)) + group = dataset.createGroup("navigation_data") + group.createDimension("n_scan", N_SCAN) + group.createDimension("n_samples", N_SAMPLES) + group.createDimension("n_subs", N_SUBS) + group.createDimension("n_horns", N_HORNS) + subs = group.createVariable("n_subs", "i4", dimensions=("n_subs",)) subs[:] = np.arange(N_SUBS) - dimensions = ('n_scan', 'n_subs', 'n_horns') + dimensions = ("n_scan", "n_subs", "n_horns") shape = (N_SCAN, N_SUBS, N_HORNS) longitude = group.createVariable( - 'longitude', + "longitude", np.float32, dimensions=dimensions, ) longitude[:] = np.ones(shape) latitude = group.createVariable( - 'latitude', + "latitude", np.float32, dimensions=dimensions, ) latitude[:] = 2. * np.ones(shape) azimuth = group.createVariable( - 'ici_solar_azimuth_angle', + "ici_solar_azimuth_angle", np.float32, dimensions=dimensions, ) azimuth[:] = 3. * np.ones(shape) zenith = group.createVariable( - 'ici_solar_zenith_angle', + "ici_solar_zenith_angle", np.float32, dimensions=dimensions, ) zenith[:] = 4. * np.ones(shape) - dimensions = ('n_scan', 'n_samples', 'n_horns') + dimensions = ("n_scan", "n_samples", "n_horns") shape = (N_SCAN, N_SAMPLES, N_HORNS) delta_longitude = group.createVariable( - 'delta_longitude', + "delta_longitude", np.float32, dimensions=dimensions, ) delta_longitude[:] = 1000. 
* np.ones(shape) delta_latitude = group.createVariable( - 'delta_latitude', + "delta_latitude", np.float32, dimensions=dimensions, ) @@ -179,35 +179,35 @@ def _write_navigation_data_group(dataset): @staticmethod def _write_measurement_data_group(dataset): """Write the measurement data group.""" - group = dataset.createGroup('measurement_data') - group.createDimension('n_scan', N_SCAN) - group.createDimension('n_samples', N_SAMPLES) - group.createDimension('n_channels', N_CHANNELS) - group.createDimension('n_183', N_183) - scan = group.createVariable('n_scan', "i4", dimensions=('n_scan',)) + group = dataset.createGroup("measurement_data") + group.createDimension("n_scan", N_SCAN) + group.createDimension("n_samples", N_SAMPLES) + group.createDimension("n_channels", N_CHANNELS) + group.createDimension("n_183", N_183) + scan = group.createVariable("n_scan", "i4", dimensions=("n_scan",)) scan[:] = np.arange(N_SCAN) samples = group.createVariable( - 'n_samples', "i4", dimensions=('n_samples',) + "n_samples", "i4", dimensions=("n_samples",) ) samples[:] = np.arange(N_SAMPLES) bt_a = group.createVariable( - 'bt_conversion_a', np.float32, dimensions=('n_channels',) + "bt_conversion_a", np.float32, dimensions=("n_channels",) ) bt_a[:] = np.ones(N_CHANNELS) bt_b = group.createVariable( - 'bt_conversion_b', np.float32, dimensions=('n_channels',) + "bt_conversion_b", np.float32, dimensions=("n_channels",) ) bt_b[:] = np.zeros(N_CHANNELS) cw = group.createVariable( - 'centre_wavenumber', np.float32, dimensions=('n_channels',) + "centre_wavenumber", np.float32, dimensions=("n_channels",) ) cw[:] = np.array( [6.0] * 3 + [8.0] * 2 + [11.0] * 3 + [15.0] * 3 + [22.0] * 2 ) ici_radiance_183 = group.createVariable( - 'ici_radiance_183', + "ici_radiance_183", np.float32, - dimensions=('n_scan', 'n_samples', 'n_183'), + dimensions=("n_scan", "n_samples", "n_183"), ) ici_radiance_183[:] = 0.08 * np.ones((N_SCAN, N_SAMPLES, N_183)) @@ -254,11 +254,11 @@ def test_solar_zenith(self, reader): def test_calibrate_raises_for_unknown_calibration_method(self, reader): """Test perform calibration raises for unknown calibration method.""" variable = xr.DataArray(np.ones(3)) - dataset_info = {'calibration': 'unknown', 'name': 'radiance'} - with pytest.raises(ValueError, match='Unknown calibration'): + dataset_info = {"calibration": "unknown", "name": "radiance"} + with pytest.raises(ValueError, match="Unknown calibration"): reader._calibrate(variable, dataset_info) - @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt') + @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt") def test_calibrate_does_not_call_calibrate_bt_if_not_needed( self, mocked_calibrate, @@ -270,13 +270,13 @@ def test_calibrate_does_not_call_calibrate_bt_if_not_needed( [0.060, 0.065, 0.070, 0.075], [0.080, 0.085, 0.090, 0.095], ]), - dims=('n_scan', 'n_samples'), + dims=("n_scan", "n_samples"), ) - dataset_info = {'calibration': 'radiance'} + dataset_info = {"calibration": "radiance"} reader._calibrate(variable, dataset_info) mocked_calibrate.assert_not_called() - @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt') + @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt") def test_calibrate_calls_calibrate_bt( self, mocked_calibrate_bt, @@ -288,11 +288,11 @@ def test_calibrate_calls_calibrate_bt( [0.060, 0.065, 0.070, 0.075], [0.080, 0.085, 0.090, 0.095], ]), - dims=('n_scan', 'n_samples'), + dims=("n_scan", "n_samples"), ) dataset_info = { - 'calibration': 'brightness_temperature', - 
'chan_index': 2, + "calibration": "brightness_temperature", + "chan_index": 2, } reader._calibrate(variable, dataset_info) mocked_calibrate_bt.assert_called_once_with( @@ -320,10 +320,10 @@ def test_calibrate_bt(self, reader): ]) np.testing.assert_allclose(bt, expected_bt) - @pytest.mark.parametrize('dims', ( - ('n_scan', 'n_samples'), - ('x', 'y'), - )) + @pytest.mark.parametrize("dims", [ + ("n_scan", "n_samples"), + ("x", "y"), + ]) def test_standardize_dims(self, reader, dims): """Test standardize dims.""" variable = xr.DataArray( @@ -331,12 +331,12 @@ def test_standardize_dims(self, reader, dims): dims=dims, ) standardized = reader._standardize_dims(variable) - assert standardized.dims == ('y', 'x') + assert standardized.dims == ("y", "x") - @pytest.mark.parametrize('dims,data_info,expect', ( - (('y', 'x', 'n_horns'), {"n_horns": 1}, 1), - (('y', 'x', 'n_183'), {"n_183": 2}, 2), - )) + @pytest.mark.parametrize(("dims", "data_info", "expect"), [ + (("y", "x", "n_horns"), {"n_horns": 1}, 1), + (("y", "x", "n_183"), {"n_183": 2}, 2), + ]) def test_filter_variable(self, reader, dims, data_info, expect): """Test filter variable.""" data = np.arange(24).reshape(2, 3, 4) @@ -345,7 +345,7 @@ def test_filter_variable(self, reader, dims, data_info, expect): dims=dims, ) filtered = reader._filter_variable(variable, data_info) - assert filtered.dims == ('y', 'x') + assert filtered.dims == ("y", "x") assert (filtered == data[:, :, expect]).all() def test_drop_coords(self, reader): @@ -353,7 +353,7 @@ def test_drop_coords(self, reader): coords = "dummy" data = xr.DataArray( np.ones(10), - dims=('y'), + dims=("y"), coords={coords: 0}, ) assert coords in data.coords @@ -362,22 +362,22 @@ def test_drop_coords(self, reader): def test_get_third_dimension_name(self, reader): """Test get third dimension name.""" - data = xr.DataArray(np.ones((1, 1, 1)), dims=('x', 'y', 'z')) - assert reader._get_third_dimension_name(data) == 'z' + data = xr.DataArray(np.ones((1, 1, 1)), dims=("x", "y", "z")) + assert reader._get_third_dimension_name(data) == "z" def test_get_third_dimension_name_return_none_for_2d_data(self, reader): """Test get third dimension name return none for 2d data.""" - data = xr.DataArray(np.ones((1, 1)), dims=('x', 'y')) + data = xr.DataArray(np.ones((1, 1)), dims=("x", "y")) assert reader._get_third_dimension_name(data) is None def test_get_dataset_return_none_if_data_not_exist(self, reader): """Tes get dataset return none if data does not exist.""" - dataset_id = {'name': 'unknown'} - dataset_info = {'file_key': 'non/existing/data'} + dataset_id = {"name": "unknown"} + dataset_info = {"file_key": "non/existing/data"} dataset = reader.get_dataset(dataset_id, dataset_info) assert dataset is None - @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt') + @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt") def test_get_dataset_does_not_calibrate_if_not_desired( self, mocked_calibrate, @@ -385,10 +385,10 @@ def test_get_dataset_does_not_calibrate_if_not_desired( dataset_info, ): """Test get dataset does not calibrate if not desired.""" - dataset_id = {'name': '1'} - dataset_info.pop('calibration') + dataset_id = {"name": "1"} + dataset_info.pop("calibration") dataset = reader.get_dataset(dataset_id, dataset_info) - assert dataset.dims == ('y', 'x') + assert dataset.dims == ("y", "x") mocked_calibrate.assert_not_called() assert isinstance(dataset, xr.DataArray) @@ -397,15 +397,15 @@ def test_get_dataset_orthorectifies_if_orthorect_data_defined( reader, ): """Test get 
dataset orthorectifies if orthorect data is defined.""" - dataset_id = {'name': 'lon_pixels_horn_1'} + dataset_id = {"name": "lon_pixels_horn_1"} dataset_info = { - 'name': 'lon_pixels_horn_1', - 'file_type': 'nc_ici_l1b_rad', - 'file_key': 'longitude', - 'orthorect_data': 'data/navigation_data/delta_longitude', - 'standard_name': 'longitude', - 'n_horns': 0, - 'modifiers': (), + "name": "lon_pixels_horn_1", + "file_type": "nc_ici_l1b_rad", + "file_key": "longitude", + "orthorect_data": "data/navigation_data/delta_longitude", + "standard_name": "longitude", + "n_horns": 0, + "modifiers": (), } dataset = reader.get_dataset(dataset_id, dataset_info) np.testing.assert_allclose(dataset, 1.009139, atol=1e-6) @@ -416,7 +416,7 @@ def test_get_dataset_handles_calibration( dataset_info, ): """Test get dataset handles calibration.""" - dataset_id = {'name': '1'} + dataset_id = {"name": "1"} dataset = reader.get_dataset(dataset_id, dataset_info) assert dataset.attrs["calibration"] == "brightness_temperature" np.testing.assert_allclose(dataset, 272.73734) @@ -426,15 +426,16 @@ def test_interpolate_returns_none_if_dataset_not_exist(self, reader): azimuth, zenith = reader._interpolate( InterpolationType.OBSERVATION_ANGLES ) - assert azimuth is None and zenith is None + assert azimuth is None + assert zenith is None - @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_geo') + @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_geo") def test_interpolate_calls_interpolate_geo(self, mock, reader): """Test interpolate calls interpolate_geo.""" reader._interpolate(InterpolationType.LONLAT) mock.assert_called_once() - @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_viewing_angle') # noqa: E501 + @patch("satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_viewing_angle") # noqa: E501 def test_interpolate_calls_interpolate_viewing_angles(self, mock, reader): """Test interpolate calls interpolate viewing_angles.""" reader._interpolate(InterpolationType.SOLAR_ANGLES) @@ -443,13 +444,13 @@ def test_interpolate_calls_interpolate_viewing_angles(self, mock, reader): def test_interpolate_geo(self, reader): """Test interpolate geographic coordinates.""" shape = (N_SCAN, N_SUBS, N_HORNS) - dims = ('n_scan', 'n_subs', 'n_horns') + dims = ("n_scan", "n_subs", "n_horns") longitude = xr.DataArray( 2. * np.ones(shape), dims=dims, coords={ - 'n_horns': np.arange(N_HORNS), - 'n_subs': np.arange(N_SUBS), + "n_horns": np.arange(N_HORNS), + "n_subs": np.arange(N_SUBS), }, ) latitude = xr.DataArray(np.ones(shape), dims=dims) @@ -467,13 +468,13 @@ def test_interpolate_geo(self, reader): def test_interpolate_viewing_angle(self, reader): """Test interpolate viewing angle.""" shape = (N_SCAN, N_SUBS, N_HORNS) - dims = ('n_scan', 'n_subs', 'n_horns') + dims = ("n_scan", "n_subs", "n_horns") azimuth = xr.DataArray( np.ones(shape), dims=dims, coords={ - 'n_horns': np.arange(N_HORNS), - 'n_subs': np.arange(N_SUBS), + "n_horns": np.arange(N_HORNS), + "n_subs": np.arange(N_SUBS), }, ) zenith = xr.DataArray(100. 
* np.ones(shape), dims=dims) @@ -492,11 +493,11 @@ def test_orthorectify(self, reader): """Test orthorectify.""" variable = xr.DataArray( np.ones((N_SCAN, N_SAMPLES, N_HORNS)), - dims=('y', 'x', 'n_horns'), - coords={'n_horns': np.arange(N_HORNS)} + dims=("y", "x", "n_horns"), + coords={"n_horns": np.arange(N_HORNS)} ) - variable = variable.sel({'n_horns': 0}) - orthorect_data_name = 'data/navigation_data/delta_longitude' + variable = variable.sel({"n_horns": 0}) + orthorect_data_name = "data/navigation_data/delta_longitude" orthorectified = reader._orthorectify( variable, orthorect_data_name, @@ -507,18 +508,18 @@ def test_get_global_attributes(self, reader): """Test get global attributes.""" attributes = reader._get_global_attributes() assert attributes == { - 'filename': reader.filename, - 'start_time': datetime(2000, 1, 2, 3, 4, 5), - 'end_time': datetime(2000, 1, 2, 4, 5, 6), - 'spacecraft_name': 'SGB', - 'ssp_lon': None, - 'sensor': 'ICI', - 'filename_start_time': datetime(2000, 1, 1, 1, 0), - 'filename_end_time': datetime(2000, 1, 1, 2, 0), - 'platform_name': 'SGB', - 'quality_group': { - 'duration_of_product': np.array(1000., dtype=np.float32), - 'overall_quality_flag': 0, + "filename": reader.filename, + "start_time": datetime(2000, 1, 2, 3, 4, 5), + "end_time": datetime(2000, 1, 2, 4, 5, 6), + "spacecraft_name": "SGB", + "ssp_lon": None, + "sensor": "ICI", + "filename_start_time": datetime(2000, 1, 1, 1, 0), + "filename_end_time": datetime(2000, 1, 1, 2, 0), + "platform_name": "SGB", + "quality_group": { + "duration_of_product": np.array(1000., dtype=np.float32), + "overall_quality_flag": 0, } } @@ -526,12 +527,12 @@ def test_get_quality_attributes(self, reader): """Test get quality attributes.""" attributes = reader._get_quality_attributes() assert attributes == { - 'duration_of_product': np.array(1000., dtype=np.float32), - 'overall_quality_flag': 0, + "duration_of_product": np.array(1000., dtype=np.float32), + "overall_quality_flag": 0, } @patch( - 'satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._get_global_attributes', + "satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._get_global_attributes", return_value={"mocked_global_attributes": True}, ) def test_manage_attributes(self, mock, reader): @@ -540,11 +541,11 @@ def test_manage_attributes(self, mock, reader): np.ones(N_SCAN), attrs={"season": "summer"}, ) - dataset_info = {'name': 'ici_1', 'units': 'K'} + dataset_info = {"name": "ici_1", "units": "K"} variable = reader._manage_attributes(variable, dataset_info) assert variable.attrs == { - 'season': 'summer', - 'units': 'K', - 'name': 'ici_1', - 'mocked_global_attributes': True, + "season": "summer", + "units": "K", + "name": "ici_1", + "mocked_global_attributes": True, } diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py index 1c22ef515d..1886560402 100644 --- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py +++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py @@ -138,7 +138,7 @@ def test_insat3d_backend_has_1km_channels(insat_filename): assert res["IMG_SWIR"].shape == shape_1km -@pytest.mark.parametrize("resolution,name,shape,expected_values,expected_name,expected_units", +@pytest.mark.parametrize(("resolution", "name", "shape", "expected_values", "expected_name", "expected_units"), [(1000, "IMG_VIS_RADIANCE", shape_1km, mask_array(values_1km * 2), "Visible Radiance", rad_units), (1000, "IMG_VIS_ALBEDO", shape_1km, mask_array(values_1km * 3), @@ -173,7 +173,7 @@ def 
test_insat3d_has_dask_arrays(insat_filename): def test_insat3d_only_has_3_resolutions(insat_filename): """Test that we only accept 1000, 4000, 8000.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Resolution 1024 not available. Available resolutions: 1000, 4000, 8000"): _ = open_dataset(insat_filename, resolution=1024) @@ -208,7 +208,7 @@ def test_insat3d_datatree_has_global_attributes(insat_filename): assert res.attrs.keys() >= global_attrs.keys() -@pytest.mark.parametrize("calibration,expected_values", +@pytest.mark.parametrize(("calibration", "expected_values"), [("counts", values_1km), ("radiance", mask_array(values_1km * 2)), ("reflectance", mask_array(values_1km * 3))]) @@ -228,7 +228,7 @@ def test_filehandler_returns_masked_data_in_space(insat_filehandler): fh = insat_filehandler ds_info = None - ds_id = make_dataid(name="VIS", resolution=1000, calibration='reflectance') + ds_id = make_dataid(name="VIS", resolution=1000, calibration="reflectance") darr = fh.get_dataset(ds_id, ds_info) assert np.isnan(darr[0, 0]) @@ -238,7 +238,7 @@ def test_insat3d_has_orbital_parameters(insat_filehandler): fh = insat_filehandler ds_info = None - ds_id = make_dataid(name="VIS", resolution=1000, calibration='reflectance') + ds_id = make_dataid(name="VIS", resolution=1000, calibration="reflectance") darr = fh.get_dataset(ds_id, ds_info) assert "orbital_parameters" in darr.attrs diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py index 40042aa1de..5e9d0ff563 100644 --- a/satpy/tests/reader_tests/test_li_l2_nc.py +++ b/satpy/tests/reader_tests/test_li_l2_nc.py @@ -43,9 +43,9 @@ def std_filetype_infos(): cfg = load_yaml_configs(cpaths[0]) # get the li_l2 filetype: - ftypes = cfg['file_types'] + ftypes = cfg["file_types"] - yield ftypes + return ftypes # Note: the helper class below has some missing abstract class implementation, @@ -69,31 +69,31 @@ def _test_dataset_single_variable(self, vname, desc, settings, handler): """Check the validity of a given variable.""" dname = vname - dims = settings.get('dimensions', {}) + dims = settings.get("dimensions", {}) - var_path = settings.get('variable_path', '') + var_path = settings.get("variable_path", "") # Compute shape from dimensions: - if desc['shape'] == (): + if desc["shape"] == (): # scalar case, dim should have been added in the code by validate_array_dimensions shape = (1,) else: - shape = tuple([dims[dim_name] for dim_name in desc['shape']]) + shape = tuple([dims[dim_name] for dim_name in desc["shape"]]) dataset_info = { - 'name': dname, - 'variable_name': vname, - 'use_rescaling': False, + "name": dname, + "variable_name": vname, + "use_rescaling": False, } var_params = [dataset_info, desc, dname, handler, shape, var_path] self._test_dataset_variable(var_params) def _test_dataset_variables(self, settings, ds_desc, handler): """Check the loading of the non in sector variables.""" - assert 'variables' in ds_desc - all_vars = ds_desc['variables'] + assert "variables" in ds_desc + all_vars = ds_desc["variables"] - variables = settings.get('variables') + variables = settings.get("variables") for vname, desc in variables.items(): # variable should be in list of dataset: assert vname in all_vars @@ -105,17 +105,17 @@ def _test_dataset_single_sector_variable(self, names, desc, settings, handler): dname = f"{vname}_{sname}_sector" - dims = settings.get('dimensions', {}) + dims = settings.get("dimensions", {}) - var_path = settings.get('variable_path', '') + var_path = 
settings.get("variable_path", "") - shape = tuple([dims[dim_name] for dim_name in desc['shape']]) + shape = tuple([dims[dim_name] for dim_name in desc["shape"]]) dataset_info = { - 'name': dname, - 'variable_name': vname, - 'sector_name': sname, - 'use_rescaling': False, + "name": dname, + "variable_name": vname, + "sector_name": sname, + "use_rescaling": False, } var_params = [dataset_info, desc, vname, handler, shape, var_path] self._test_dataset_variable(var_params, sname=sname) @@ -125,7 +125,7 @@ def _test_dataset_variable(self, var_params, sname=""): dataset_info, desc, dname, handler, shape, var_path = var_params res = self.get_variable_dataset(dataset_info, dname, handler) assert res.shape == shape - assert res.dims[0] == 'y' + assert res.dims[0] == "y" # Should retrieve content with fullname key: full_name = self.create_fullname_key(desc, var_path, dname, sname=sname) # Note: 'content' is not recognized as a valid member of the class below @@ -140,23 +140,23 @@ def get_variable_dataset(self, dataset_info, dname, handler): res = handler.get_dataset(dataset_id, dataset_info) return res - def create_fullname_key(self, desc, var_path, vname, sname=''): + def create_fullname_key(self, desc, var_path, vname, sname=""): """Create full name key for sector/non-sector content retrieval.""" - vpath = desc.get('path', var_path) - if vpath != "" and vpath[-1] != '/': - vpath += '/' + vpath = desc.get("path", var_path) + if vpath != "" and vpath[-1] != "/": + vpath += "/" if sname != "": - sname += '/' + sname += "/" full_name = f"{vpath}{sname}{vname}" return full_name def _test_dataset_sector_variables(self, settings, ds_desc, handler): """Check the loading of the in sector variables.""" - sector_vars = settings.get('sector_variables') - sectors = settings.get('sectors', ['north', 'east', 'south', 'west']) + sector_vars = settings.get("sector_variables") + sectors = settings.get("sectors", ["north", "east", "south", "west"]) - assert 'sector_variables' in ds_desc - all_vars = ds_desc['sector_variables'] + assert "sector_variables" in ds_desc + all_vars = ds_desc["sector_variables"] for sname in sectors: for vname, desc in sector_vars.items(): @@ -168,33 +168,33 @@ def test_dataset_loading(self, filetype_infos): """Test loading of all datasets from all products.""" # Iterate on all the available product types: for ptype, pinfo in products_dict.items(): - ftype = pinfo['ftype'] + ftype = pinfo["ftype"] filename_info = { - 'start_time': "0000", - 'end_time': "1000" + "start_time": "0000", + "end_time": "1000" } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, ftype)) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, ftype)) ds_desc = handler.ds_desc # retrieve the schema that what used to generate the content for that product: settings = get_product_schema(ptype) # Now we check all the variables are available: - if 'variables' in settings: + if "variables" in settings: self._test_dataset_variables(settings, ds_desc, handler) # check the sector variables: - if 'sector_variables' in settings: + if "sector_variables" in settings: self._test_dataset_sector_variables(settings, ds_desc, handler) def test_unregistered_dataset_loading(self, filetype_infos): """Test loading of an unregistered dataset.""" # Iterate on all the available product types: - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc')) + handler = LIL2NCFileHandler("filename", {}, 
extract_filetype_info(filetype_infos, "li_l2_af_nc")) - dataset_id = make_dataid(name='test_dataset') + dataset_id = make_dataid(name="test_dataset") with pytest.raises(KeyError): handler.get_dataset(dataset_id) @@ -202,22 +202,22 @@ def test_dataset_not_in_provided_dataset(self, filetype_infos): """Test loading of a dataset that is not provided.""" # Iterate on all the available product types: - dataset_dict = {'name': 'test_dataset'} + dataset_dict = {"name": "test_dataset"} - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc")) - dataset_id = make_dataid(name='test_dataset') + dataset_id = make_dataid(name="test_dataset") assert handler.get_dataset(dataset_id, ds_info=dataset_dict) is None def test_filename_infos(self, filetype_infos): """Test settings retrieved from filename.""" filename_info = { - 'start_time': "20101112131415", - 'end_time': "20101112131416" + "start_time": "20101112131415", + "end_time": "20101112131416" } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, 'li_l2_af_nc')) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_af_nc")) # Start and end time should come from filename info: assert handler.start_time == "20101112131415" @@ -236,19 +236,19 @@ def test_filename_infos(self, filetype_infos): assert len(handler.provided_datasets) > 0 # Sensor names should be just 'li' - assert handler.sensor_names == {'li'} + assert handler.sensor_names == {"li"} # check product type: - assert handler.product_type == '2-AF' + assert handler.product_type == "2-AF" def test_var_path_exists(self, filetype_infos): """Test variable_path_exists from li reader.""" filename_info = { - 'start_time': "20101112131415", - 'end_time': "20101112131416", + "start_time": "20101112131415", + "end_time": "20101112131416", } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, 'li_l2_lef_nc')) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) # Check variable paths: assert handler.variable_path_exists("dummy") is False @@ -265,11 +265,11 @@ def test_var_path_exists(self, filetype_infos): def test_get_first_valid_variable(self, filetype_infos): """Test get_first_valid_variable from li reader.""" filename_info = { - 'start_time': "20101112131415", - 'end_time': "20101112131416", + "start_time": "20101112131415", + "end_time": "20101112131416", } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, 'li_l2_lef_nc')) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) # Check variable paths: var1 = handler.get_first_valid_variable(["dummy/path", "data/north/event_id"]) @@ -311,30 +311,30 @@ def test_get_first_valid_variable(self, filetype_infos): assert id(meas2) == id(var3) # We should have a fill value on those variables: - assert var1.attrs.get('_FillValue') == 65535 - assert var2.attrs.get('_FillValue') == 65535 + assert var1.attrs.get("_FillValue") == 65535 + assert var2.attrs.get("_FillValue") == 65535 def test_get_first_valid_variable_not_found(self, filetype_infos): """Test get_first_valid_variable from li reader if the variable is not found.""" filename_info = { - 'start_time': "20101112131415", - 'end_time': "20101112131416", + "start_time": 
"20101112131415", + "end_time": "20101112131416", } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, 'li_l2_lef_nc')) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) with pytest.raises(KeyError): handler.get_first_valid_variable(["dummy/path", "data/test/test_var"]) def test_available_datasets(self, filetype_infos): """Test available_datasets from li reader.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_lef_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_lef_nc")) # get current ds_infos. These should all be returned by the available_datasets ds_infos_to_compare = handler.dataset_infos.copy() # now add a dummy configured dataset to make sure that it is included in the available_datasets output - ds_info_dummy = {'test': 'test'} + ds_info_dummy = {"test": "test"} conf_ds_dummy = [(True, ds_info_dummy)] ds_infos_to_compare.insert(0, ds_info_dummy) @@ -343,11 +343,11 @@ def test_available_datasets(self, filetype_infos): def test_variable_scaling(self, filetype_infos): """Test automatic rescaling with offset and scale attributes.""" filename_info = { - 'start_time': "20101112131415", - 'end_time': "20101112131416" + "start_time": "20101112131415", + "end_time": "20101112131416" } - handler = LIL2NCFileHandler('filename', filename_info, extract_filetype_info(filetype_infos, 'li_l2_lfl_nc')) + handler = LIL2NCFileHandler("filename", filename_info, extract_filetype_info(filetype_infos, "li_l2_lfl_nc")) # Get the raw variable without rescaling: vname = "latitude" @@ -355,9 +355,9 @@ def test_variable_scaling(self, filetype_infos): # Get the dataset without rescaling: dataset_info = { - 'name': vname, - 'variable_name': vname, - 'use_rescaling': False, + "name": vname, + "variable_name": vname, + "use_rescaling": False, } dataset_id = make_dataid(name=vname) @@ -365,7 +365,7 @@ def test_variable_scaling(self, filetype_infos): assert np.all(lat_noscale.values == rawlat) # Now get the dataset with scaling: - dataset_info['use_rescaling'] = True + dataset_info["use_rescaling"] = True lat_scaled = handler.get_dataset(dataset_id, dataset_info) # By default we write data in the ranges [-88.3/0.0027, 88.3/0.0027] for latitude and longitude: @@ -374,12 +374,12 @@ def test_variable_scaling(self, filetype_infos): def test_swath_coordinates(self, filetype_infos): """Test that swath coordinates are used correctly to assign coordinates to some datasets.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_lfl_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_lfl_nc")) # Check latitude: dsid = make_dataid(name="latitude") dset = handler.get_dataset(dsid) - assert 'coordinates' not in dset.attrs + assert "coordinates" not in dset.attrs # get_area_def should raise exception: with pytest.raises(NotImplementedError): @@ -388,21 +388,21 @@ def test_swath_coordinates(self, filetype_infos): # Check radiance: dsid = make_dataid(name="radiance") dset = handler.get_dataset(dsid) - assert 'coordinates' in dset.attrs - assert dset.attrs['coordinates'][0] == "longitude" - assert dset.attrs['coordinates'][1] == "latitude" + assert "coordinates" in dset.attrs + assert dset.attrs["coordinates"][0] == "longitude" + assert dset.attrs["coordinates"][1] == "latitude" with pytest.raises(NotImplementedError): handler.get_area_def(dsid) def 
test_report_datetimes(self, filetype_infos): """Should report time variables as numpy datetime64 type and time durations as timedelta64.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_le_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_le_nc")) # Check epoch_time: dsid = make_dataid(name="epoch_time_north_sector") dset = handler.get_dataset(dsid) - assert dset.values.dtype == np.dtype('datetime64[ns]') + assert dset.values.dtype == np.dtype("datetime64[ns]") # The default epoch_time should be 1.234 seconds after epoch: ref_time = np.datetime64(datetime(2000, 1, 1, 0, 0, 1, 234000)) @@ -411,14 +411,14 @@ def test_report_datetimes(self, filetype_infos): # Check time_offset: dsid = make_dataid(name="time_offset_east_sector") dset = handler.get_dataset(dsid) - assert dset.values.dtype == np.dtype('timedelta64[ns]') + assert dset.values.dtype == np.dtype("timedelta64[ns]") # The default time_offset should be: np.linspace(0.0, 1000.0, nobs) # but then we first multiply by 1e6 to generate us times: # Note that below no automatic transform to np.float64 is happening: nobs = dset.shape[0] ref_data = np.linspace(0.0, 1000.0, nobs).astype(np.float32) - ref_data = (ref_data * 1e9).astype('timedelta64[ns]') + ref_data = (ref_data * 1e9).astype("timedelta64[ns]") # And not absolutely sure why, but we always get the timedelta in ns from the dataset: # ref_data = (ref_data).astype('timedelta64[ns]') @@ -427,33 +427,33 @@ def test_report_datetimes(self, filetype_infos): def test_milliseconds_to_timedelta(self, filetype_infos): """Should covert milliseconds to timedelta.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_lfl_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_lfl_nc")) # Check flash_duration: dsid = make_dataid(name="flash_duration") dset = handler.get_dataset(dsid) - assert dset.values.dtype == np.dtype('timedelta64[ns]') + assert dset.values.dtype == np.dtype("timedelta64[ns]") nobs = dset.shape[0] - ref_data = np.linspace(0, 1000, nobs).astype('u2') - ref_data = (ref_data * 1e6).astype('timedelta64[ns]') + ref_data = np.linspace(0, 1000, nobs).astype("u2") + ref_data = (ref_data * 1e6).astype("timedelta64[ns]") assert np.all(dset.values == ref_data) def test_apply_accumulate_index_offset(self, filetype_infos): """Should accumulate index offsets.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_le_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_le_nc")) # Check time offset: dsid = make_dataid(name="l1b_chunk_offsets_north_sector") dset = handler.get_dataset(dsid) nobs = dset.shape[0] - ref_data = (np.arange(nobs)).astype('u4') + ref_data = (np.arange(nobs)).astype("u4") # check first execution without offset assert np.all(dset.values == ref_data) # check that the offset is being stored - assert handler.current_ds_info['__index_offset'] == 123 + assert handler.current_ds_info["__index_offset"] == 123 # check execution with offset value # this simulates the case where we are loading this variable from multiple files and concatenating it @@ -462,62 +462,62 @@ def test_apply_accumulate_index_offset(self, filetype_infos): def test_combine_info(self, filetype_infos): """Test overridden combine_info.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_le_nc')) + handler = 
LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_le_nc")) # get a dataset including the index_offset in the ds_info dsid = make_dataid(name="l1b_chunk_offsets_north_sector") - ds_info = {'name': 'l1b_chunk_offsets_north_sector', - 'variable_name': 'l1b_chunk_offsets', - 'sector_name': 'north', - '__index_offset': 1000, - 'accumulate_index_offset': "{sector_name}/l1b_window"} + ds_info = {"name": "l1b_chunk_offsets_north_sector", + "variable_name": "l1b_chunk_offsets", + "sector_name": "north", + "__index_offset": 1000, + "accumulate_index_offset": "{sector_name}/l1b_window"} dset = handler.get_dataset(dsid, ds_info=ds_info) handler.combine_info([dset.attrs]) # combine_info should have removed the index_offset key from the ds_info passed to get_dataset - assert '__index_offset' not in ds_info + assert "__index_offset" not in ds_info # and reset the current_ds_info dict, in order to avoid failures if we call combine_info again assert handler.current_ds_info is None def test_coordinates_projection(self, filetype_infos): """Should automatically generate lat/lon coords from projection data.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc")) dsid = make_dataid(name="flash_accumulation") dset = handler.get_dataset(dsid) - assert 'coordinates' in dset.attrs + assert "coordinates" in dset.attrs - assert dset.attrs['coordinates'][0] == "longitude" - assert dset.attrs['coordinates'][1] == "latitude" + assert dset.attrs["coordinates"][0] == "longitude" + assert dset.attrs["coordinates"][1] == "latitude" with pytest.raises(NotImplementedError): handler.get_area_def(dsid) - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_afr_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afr_nc")) dsid = make_dataid(name="flash_radiance") dset = handler.get_dataset(dsid) - assert 'coordinates' in dset.attrs + assert "coordinates" in dset.attrs - assert dset.attrs['coordinates'][0] == "longitude" - assert dset.attrs['coordinates'][1] == "latitude" + assert dset.attrs["coordinates"][0] == "longitude" + assert dset.attrs["coordinates"][1] == "latitude" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_afa_nc')) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_afa_nc")) dsid = make_dataid(name="accumulated_flash_area") dset = handler.get_dataset(dsid) - assert 'coordinates' in dset.attrs + assert "coordinates" in dset.attrs - assert dset.attrs['coordinates'][0] == "longitude" - assert dset.attrs['coordinates'][1] == "latitude" + assert dset.attrs["coordinates"][0] == "longitude" + assert dset.attrs["coordinates"][1] == "latitude" def test_generate_coords_on_accumulated_prods(self, filetype_infos): """Test daskified generation of coords.""" - accumulated_products = ['li_l2_af_nc', 'li_l2_afr_nc', 'li_l2_afa_nc'] - coordinate_datasets = ['longitude', 'latitude'] + accumulated_products = ["li_l2_af_nc", "li_l2_afr_nc", "li_l2_afa_nc"] + coordinate_datasets = ["longitude", "latitude"] for accum_prod in accumulated_products: for ds_name in coordinate_datasets: - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, accum_prod)) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, accum_prod)) dsid = 
make_dataid(name=ds_name) dset = handler.get_dataset(dsid) # Check dataset type @@ -527,12 +527,12 @@ def test_generate_coords_on_accumulated_prods(self, filetype_infos): def test_generate_coords_on_lon_lat(self, filetype_infos): """Test getting lon/lat dataset on accumulated product.""" - accumulated_products = ['li_l2_af_nc', 'li_l2_afr_nc', 'li_l2_afa_nc'] - coordinate_datasets = ['longitude', 'latitude'] + accumulated_products = ["li_l2_af_nc", "li_l2_afr_nc", "li_l2_afa_nc"] + coordinate_datasets = ["longitude", "latitude"] for accum_prod in accumulated_products: for ds_name in coordinate_datasets: - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, accum_prod)) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, accum_prod)) dsid = make_dataid(name=ds_name) handler.generate_coords_from_scan_angles = mock.MagicMock( side_effect=handler.generate_coords_from_scan_angles) @@ -541,12 +541,12 @@ def test_generate_coords_on_lon_lat(self, filetype_infos): def test_generate_coords_inverse_proj(self, filetype_infos): """Test inverse_projection execution delayed until .values is called on the dataset.""" - accumulated_products = ['li_l2_af_nc', 'li_l2_afr_nc', 'li_l2_afa_nc'] - coordinate_datasets = ['longitude', 'latitude'] + accumulated_products = ["li_l2_af_nc", "li_l2_afr_nc", "li_l2_afa_nc"] + coordinate_datasets = ["longitude", "latitude"] for accum_prod in accumulated_products: for ds_name in coordinate_datasets: - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, accum_prod)) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, accum_prod)) dsid = make_dataid(name=ds_name) handler.inverse_projection = mock.MagicMock(side_effect=handler.inverse_projection) dset = handler.get_dataset(dsid) @@ -557,17 +557,17 @@ def test_generate_coords_inverse_proj(self, filetype_infos): def test_generate_coords_not_called_on_non_coord_dataset(self, filetype_infos): """Test that the method is not called when getting non-coord dataset.""" - handler = self.generate_coords(filetype_infos, 'li_l2_af_nc', 'flash_accumulation') + handler = self.generate_coords(filetype_infos, "li_l2_af_nc", "flash_accumulation") assert not handler.generate_coords_from_scan_angles.called def test_generate_coords_not_called_on_non_accum_dataset(self, filetype_infos): """Test that the method is not called when getting non-accum dataset.""" - handler = self.generate_coords(filetype_infos, 'li_l2_lef_nc', 'latitude_north_sector') + handler = self.generate_coords(filetype_infos, "li_l2_lef_nc", "latitude_north_sector") assert not handler.generate_coords_from_scan_angles.called def generate_coords(self, filetype_infos, file_type_name, variable_name): """Generate file handler and mimic coordinate generator call.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, file_type_name)) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, file_type_name)) dsid = make_dataid(name=variable_name) handler.generate_coords_from_scan_angles = mock.MagicMock( side_effect=handler.generate_coords_from_scan_angles) @@ -576,10 +576,10 @@ def generate_coords(self, filetype_infos, file_type_name, variable_name): def test_generate_coords_called_once(Self, filetype_infos): """Test that the method is called only once.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc')) + handler = LIL2NCFileHandler("filename", {}, 
extract_filetype_info(filetype_infos, "li_l2_af_nc")) # check internal variable is empty assert len(handler.internal_variables) == 0 - coordinate_datasets = ['longitude', 'latitude'] + coordinate_datasets = ["longitude", "latitude"] handler.generate_coords_from_scan_angles = mock.MagicMock(side_effect=handler.generate_coords_from_scan_angles) for ds_name in coordinate_datasets: @@ -593,34 +593,34 @@ def test_generate_coords_called_once(Self, filetype_infos): def test_coords_generation(self, filetype_infos): """Compare daskified coords generation results with non-daskified.""" # Prepare dummy (but somewhat realistic) arrays of azimuth/elevation values. - products = ['li_l2_af_nc', - 'li_l2_afr_nc', - 'li_l2_afa_nc'] + products = ["li_l2_af_nc", + "li_l2_afr_nc", + "li_l2_afa_nc"] for prod in products: - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, prod)) + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, prod)) # Get azimuth/elevation arrays from handler - azimuth = handler.get_measured_variable(handler.swath_coordinates['azimuth']) + azimuth = handler.get_measured_variable(handler.swath_coordinates["azimuth"]) azimuth = handler.apply_use_rescaling(azimuth) - elevation = handler.get_measured_variable(handler.swath_coordinates['elevation']) + elevation = handler.get_measured_variable(handler.swath_coordinates["elevation"]) elevation = handler.apply_use_rescaling(elevation) # Initialize proj_dict - proj_var = handler.swath_coordinates['projection'] + proj_var = handler.swath_coordinates["projection"] geos_proj = handler.get_measured_variable(proj_var, fill_value=None) major_axis = float(geos_proj.attrs["semi_major_axis"]) point_height = 35786400.0 # float(geos_proj.attrs["perspective_point_height"]) inv_flattening = float(geos_proj.attrs["inverse_flattening"]) lon_0 = float(geos_proj.attrs["longitude_of_projection_origin"]) sweep = str(geos_proj.attrs["sweep_angle_axis"]) - proj_dict = {'a': major_axis, - 'lon_0': lon_0, - 'h': point_height, + proj_dict = {"a": major_axis, + "lon_0": lon_0, + "h": point_height, "rf": inv_flattening, - 'proj': 'geos', - 'units': 'm', + "proj": "geos", + "units": "m", "sweep": sweep} # Compute reference values @@ -633,8 +633,8 @@ def test_coords_generation(self, filetype_infos): lat_ref = lat_ref.astype(np.float32) handler.generate_coords_from_scan_angles() - lon = handler.internal_variables['longitude'].values - lat = handler.internal_variables['latitude'].values + lon = handler.internal_variables["longitude"].values + lat = handler.internal_variables["latitude"].values # Compare the arrays, should be the same: np.testing.assert_equal(lon, lon_ref) @@ -642,7 +642,7 @@ def test_coords_generation(self, filetype_infos): def test_get_area_def_acc_products(self, filetype_infos): """Test retrieval of area def for accumulated products.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc'), + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=True) dsid = make_dataid(name="flash_accumulation") @@ -656,7 +656,7 @@ def test_get_area_def_acc_products(self, filetype_infos): def test_get_area_def_non_acc_products(self, filetype_infos): """Test retrieval of area def for non-accumulated products.""" - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_lgr_nc'), + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, 
"li_l2_lgr_nc"), with_area_definition=True) # Should throw for non-accum products: with pytest.raises(NotImplementedError): @@ -672,9 +672,9 @@ def write_flash_accum(_vname, _ocname, _settings): # We return the settings we want to use here to generate our custom/fixed product content: return { - 'num_obs': 1234, - 'providers': { - 'flash_accumulation': write_flash_accum, + "num_obs": 1234, + "providers": { + "flash_accumulation": write_flash_accum, } } @@ -682,7 +682,7 @@ def test_without_area_def(self, filetype_infos): """Test accumulated products data array without area definition.""" # without area definition handler_without_area_def = LIL2NCFileHandler( - 'filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc'), with_area_definition=False) + "filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=False) dsid = make_dataid(name="flash_accumulation") @@ -692,7 +692,7 @@ def test_without_area_def(self, filetype_infos): def test_with_area_def(self, filetype_infos): """Test accumulated products data array with area definition.""" - handler = self.handler_with_area(filetype_infos, 'li_l2_af_nc') + handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") dsid = make_dataid(name="flash_accumulation") # Retrieve the 2D array: arr = handler.get_dataset(dsid).values @@ -700,7 +700,7 @@ def test_with_area_def(self, filetype_infos): def test_get_on_fci_grid_exc(self, filetype_infos): """Test the execution of the get_on_fci_grid function for an accumulated gridded variable.""" - handler = self.handler_with_area(filetype_infos, 'li_l2_af_nc') + handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") handler.get_array_on_fci_grid = mock.MagicMock(side_effect=handler.get_array_on_fci_grid) dsid = make_dataid(name="flash_accumulation") handler.get_dataset(dsid) @@ -708,7 +708,7 @@ def test_get_on_fci_grid_exc(self, filetype_infos): def test_get_on_fci_grid_exc_non_grid(self, filetype_infos): """Test the non-execution of the get_on_fci_grid function for an accumulated non-gridded variable.""" - handler = self.handler_with_area(filetype_infos, 'li_l2_af_nc') + handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") handler.get_array_on_fci_grid = mock.MagicMock(side_effect=handler.get_array_on_fci_grid) dsid = make_dataid(name="accumulation_offsets") handler.get_dataset(dsid) @@ -716,7 +716,7 @@ def test_get_on_fci_grid_exc_non_grid(self, filetype_infos): def test_get_on_fci_grid_exc_non_accum(self, filetype_infos): """Test the non-execution of the get_on_fci_grid function for a non-accumulated variable.""" - handler = self.handler_with_area(filetype_infos, 'li_l2_lef_nc') + handler = self.handler_with_area(filetype_infos, "li_l2_lef_nc") handler.get_array_on_fci_grid = mock.MagicMock(side_effect=handler.get_array_on_fci_grid) dsid = make_dataid(name="radiance_north_sector") handler.get_dataset(dsid) @@ -724,7 +724,7 @@ def test_get_on_fci_grid_exc_non_accum(self, filetype_infos): def test_with_area_def_vars_with_no_pattern(self, filetype_infos): """Test accumulated products variable with no patterns and with area definition.""" - handler = self.handler_with_area(filetype_infos, 'li_l2_af_nc') + handler = self.handler_with_area(filetype_infos, "li_l2_af_nc") # variable with no patterns dsid = make_dataid(name="accumulation_offsets") assert handler.get_dataset(dsid).shape == (1,) @@ -734,7 +734,7 @@ def handler_with_area(self, filetype_infos, product_name): # Note: we need a test param provider here to ensure we write the same values for 
both handlers below: FakeLIFileHandlerBase.schema_parameters = TestLIL2.param_provider # with area definition - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, product_name), + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, product_name), with_area_definition=True) return handler @@ -743,7 +743,7 @@ def test_with_area_def_pixel_placement(self, filetype_infos): # with area definition FakeLIFileHandlerBase.schema_parameters = TestLIL2.param_provider - handler = LIL2NCFileHandler('filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc'), + handler = LIL2NCFileHandler("filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=True) dsid = make_dataid(name="flash_accumulation") @@ -751,11 +751,11 @@ def test_with_area_def_pixel_placement(self, filetype_infos): arr = handler.get_dataset(dsid).values # Retrieve the x/y coordinates: - xarr = handler.get_measured_variable('x').values.astype(int) - yarr = handler.get_measured_variable('y').values.astype(int) + xarr = handler.get_measured_variable("x").values.astype(int) + yarr = handler.get_measured_variable("y").values.astype(int) handler_without_area_def = LIL2NCFileHandler( - 'filename', {}, extract_filetype_info(filetype_infos, 'li_l2_af_nc'), with_area_definition=False) + "filename", {}, extract_filetype_info(filetype_infos, "li_l2_af_nc"), with_area_definition=False) FakeLIFileHandlerBase.schema_parameters = None diff --git a/satpy/tests/reader_tests/test_meris_nc.py b/satpy/tests/reader_tests/test_meris_nc.py index 926eccc672..b4a2cda809 100644 --- a/satpy/tests/reader_tests/test_meris_nc.py +++ b/satpy/tests/reader_tests/test_meris_nc.py @@ -21,49 +21,49 @@ class TestMERISReader(unittest.TestCase): """Test various meris_nc_sen3 filehandlers.""" - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" from satpy.readers.meris_nc_sen3 import NCMERIS2, NCMERISCal, NCMERISGeo from satpy.tests.utils import make_dataid - ds_id = make_dataid(name='M01', calibration='reflectance') - ds_id2 = make_dataid(name='wsqf', calibration='reflectance') - filename_info = {'mission_id': 'ENV', 'dataset_name': 'M01', 'start_time': 0, 'end_time': 0} + ds_id = make_dataid(name="M01", calibration="reflectance") + ds_id2 = make_dataid(name="wsqf", calibration="reflectance") + filename_info = {"mission_id": "ENV", "dataset_name": "M01", "start_time": 0, "end_time": 0} - test = NCMERISCal('somedir/somefile.nc', filename_info, 'c') + test = NCMERISCal("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCMERISGeo('somedir/somefile.nc', filename_info, 'c') + test = NCMERISGeo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCMERIS2('somedir/somefile.nc', filename_info, 'c') - test.get_dataset(ds_id, {'nc_key': 'the_key'}) - test.get_dataset(ds_id2, {'nc_key': 'the_key'}) + test = NCMERIS2("somedir/somefile.nc", filename_info, "c") + test.get_dataset(ds_id, {"nc_key": "the_key"}) + test.get_dataset(ds_id2, {"nc_key": "the_key"}) mocked_dataset.assert_called() mocked_dataset.reset_mock() - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_open_file_objects(self, mocked_open_dataset): """Test initialization of file handlers.""" from 
satpy.readers.olci_nc import NCOLCIBase - filename_info = {'mission_id': 'ENV', 'dataset_name': 'M01', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "ENV", "dataset_name": "M01", "start_time": 0, "end_time": 0} open_file = mock.MagicMock() - file_handler = NCOLCIBase(open_file, filename_info, 'c') + file_handler = NCOLCIBase(open_file, filename_info, "c") # deepcode ignore W0104: This is a property that is actually a function call. file_handler.nc # pylint: disable=W0104 mocked_open_dataset.assert_called() open_file.open.assert_called() assert (open_file.open.return_value in mocked_open_dataset.call_args[0] or - open_file.open.return_value == mocked_open_dataset.call_args[1].get('filename_or_obj')) + open_file.open.return_value == mocked_open_dataset.call_args[1].get("filename_or_obj")) - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_get_dataset(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -71,17 +71,17 @@ def test_get_dataset(self, mocked_dataset): from satpy.readers.meris_nc_sen3 import NCMERIS2 from satpy.tests.utils import make_dataid - mocked_dataset.return_value = xr.Dataset({'mask': (['rows', 'columns'], + mocked_dataset.return_value = xr.Dataset({"mask": (["rows", "columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}) - ds_id = make_dataid(name='mask') - filename_info = {'mission_id': 'ENV', 'dataset_name': 'mask', 'start_time': 0, 'end_time': 0} - test = NCMERIS2('somedir/somefile.nc', filename_info, 'c') - res = test.get_dataset(ds_id, {'nc_key': 'mask'}) - self.assertEqual(res.dtype, np.dtype('bool')) - - @mock.patch('xarray.open_dataset') + coords={"rows": np.arange(5), + "columns": np.arange(6)}) + ds_id = make_dataid(name="mask") + filename_info = {"mission_id": "ENV", "dataset_name": "mask", "start_time": 0, "end_time": 0} + test = NCMERIS2("somedir/somefile.nc", filename_info, "c") + res = test.get_dataset(ds_id, {"nc_key": "mask"}) + assert res.dtype == np.dtype("bool") + + @mock.patch("xarray.open_dataset") def test_meris_angles(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -90,31 +90,31 @@ def test_meris_angles(self, mocked_dataset): from satpy.readers.meris_nc_sen3 import NCMERISAngles from satpy.tests.utils import make_dataid attr_dict = { - 'ac_subsampling_factor': 1, - 'al_subsampling_factor': 2, + "ac_subsampling_factor": 1, + "al_subsampling_factor": 2, } - mocked_dataset.return_value = xr.Dataset({'SAA': (['tie_rows', 'tie_columns'], + mocked_dataset.return_value = xr.Dataset({"SAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'SZA': (['tie_rows', 'tie_columns'], + "SZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'OAA': (['tie_rows', 'tie_columns'], + "OAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'OZA': (['tie_rows', 'tie_columns'], + "OZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}, + coords={"rows": np.arange(5), + "columns": np.arange(6)}, attrs=attr_dict) - filename_info = {'mission_id': 'ENV', 'dataset_name': 'M01', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "ENV", "dataset_name": "M01", "start_time": 0, "end_time": 0} - ds_id = make_dataid(name='solar_azimuth_angle') - ds_id2 = make_dataid(name='satellite_zenith_angle') - test 
= NCMERISAngles('somedir/somefile.nc', filename_info, 'c') + ds_id = make_dataid(name="solar_azimuth_angle") + ds_id2 = make_dataid(name="satellite_zenith_angle") + test = NCMERISAngles("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_meris_meteo(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -123,26 +123,26 @@ def test_meris_meteo(self, mocked_dataset): from satpy.readers.meris_nc_sen3 import NCMERISMeteo from satpy.tests.utils import make_dataid attr_dict = { - 'ac_subsampling_factor': 1, - 'al_subsampling_factor': 2, + "ac_subsampling_factor": 1, + "al_subsampling_factor": 2, } - data = {'humidity': (['tie_rows', 'tie_columns'], + data = {"humidity": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'total_ozone': (['tie_rows', 'tie_columns'], + "total_ozone": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'sea_level_pressure': (['tie_rows', 'tie_columns'], + "sea_level_pressure": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'total_columnar_water_vapour': (['tie_rows', 'tie_columns'], + "total_columnar_water_vapour": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))} mocked_dataset.return_value = xr.Dataset(data, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}, + coords={"rows": np.arange(5), + "columns": np.arange(6)}, attrs=attr_dict) - filename_info = {'mission_id': 'ENV', 'dataset_name': 'humidity', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "ENV", "dataset_name": "humidity", "start_time": 0, "end_time": 0} - ds_id = make_dataid(name='humidity') - ds_id2 = make_dataid(name='total_ozone') - test = NCMERISMeteo('somedir/somefile.nc', filename_info, 'c') + ds_id = make_dataid(name="humidity") + ds_id2 = make_dataid(name="total_ozone") + test = NCMERISMeteo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() @@ -160,24 +160,24 @@ def test_bitflags(self): from satpy.readers.olci_nc import BitFlags - flag_list = ['SEA_ICE', 'MEGLINT', 'HIGHGLINT', 'CASE2_S', 'CASE2_ANOM', - 'HAZE_OVER_WATER', 'WHITECAPS', 'AC_FAIL', 'BPAC_ON', 'WHITE_SCATT', - 'LOWRW', 'HIGHRW', 'OUT_OF_RANGE_AAC', 'OUT_OF_SCOPE_AAC', - 'OUT_OF_RANGE_OC_NN', 'OUT_OF_SCOPE_OC_NN', - 'OUT_OF_RANGE_CHL_OC4ME_INPUT', 'OUT_OF_RANGE_CHL_OC4ME'] + flag_list = ["SEA_ICE", "MEGLINT", "HIGHGLINT", "CASE2_S", "CASE2_ANOM", + "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "BPAC_ON", "WHITE_SCATT", + "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC", + "OUT_OF_RANGE_OC_NN", "OUT_OF_SCOPE_OC_NN", + "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"] bits = np.array([1 << x for x in range(len(flag_list))]) bflags = BitFlags(bits, flag_list=flag_list) - items = ['SEA_ICE', 'MEGLINT', 'HIGHGLINT', - 'HAZE_OVER_WATER', 'WHITECAPS', 'AC_FAIL', 'WHITE_SCATT', - 'LOWRW', 'HIGHRW', 'OUT_OF_RANGE_AAC', 'OUT_OF_SCOPE_AAC', - 'OUT_OF_RANGE_OC_NN', 'OUT_OF_SCOPE_OC_NN', - 'OUT_OF_RANGE_CHL_OC4ME_INPUT', 'OUT_OF_RANGE_CHL_OC4ME'] + items = ["SEA_ICE", "MEGLINT", "HIGHGLINT", + "HAZE_OVER_WATER", "WHITECAPS", "AC_FAIL", "WHITE_SCATT", + "LOWRW", "HIGHRW", "OUT_OF_RANGE_AAC", "OUT_OF_SCOPE_AAC", + "OUT_OF_RANGE_OC_NN", 
"OUT_OF_SCOPE_OC_NN", + "OUT_OF_RANGE_CHL_OC4ME_INPUT", "OUT_OF_RANGE_CHL_OC4ME"] mask = reduce(np.logical_or, [bflags[item] for item in items]) expected = np.array([True, True, True, False, False, True, True, True, False, True, True, True, True, True, True, True, True, True]) - self.assertTrue(all(mask == expected)) + assert all(mask == expected) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index acccb7a28d..1df0d41f12 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -29,185 +29,185 @@ def _get_calibration(num_scans): calibration = { - 'Calibration/VIS_Cal_Coeff': + "Calibration/VIS_Cal_Coeff": xr.DataArray( da.ones((19, 3), chunks=1024), - attrs={'Slope': np.array([1.] * 19), 'Intercept': np.array([0.] * 19)}, - dims=('_bands', '_coeffs')), - 'Calibration/IR_Cal_Coeff': + attrs={"Slope": np.array([1.] * 19), "Intercept": np.array([0.] * 19)}, + dims=("_bands", "_coeffs")), + "Calibration/IR_Cal_Coeff": xr.DataArray( da.ones((6, 4, num_scans), chunks=1024), - attrs={'Slope': np.array([1.] * 6), 'Intercept': np.array([0.] * 6)}, - dims=('_bands', '_coeffs', '_scans')), + attrs={"Slope": np.array([1.] * 6), "Intercept": np.array([0.] * 6)}, + dims=("_bands", "_coeffs", "_scans")), } return calibration def _get_250m_data(num_scans, rows_per_scan, num_cols): # Set some default attributes - def_attrs = {'FillValue': 65535, - 'valid_range': [0, 4095], - 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1) + def_attrs = {"FillValue": 65535, + "valid_range": [0, 4095], + "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1) } - nounits_attrs = {**def_attrs, **{'units': 'NO'}} - radunits_attrs = {**def_attrs, **{'units': 'mW/ (m2 cm-1 sr)'}} + nounits_attrs = {**def_attrs, **{"units": "NO"}} + radunits_attrs = {**def_attrs, **{"units": "mW/ (m2 cm-1 sr)"}} data = { - 'Data/EV_250_RefSB_b1': + "Data/EV_250_RefSB_b1": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_RefSB_b2': + dims=("_rows", "_cols")), + "Data/EV_250_RefSB_b2": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_RefSB_b3': + dims=("_rows", "_cols")), + "Data/EV_250_RefSB_b3": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_RefSB_b4': + dims=("_rows", "_cols")), + "Data/EV_250_RefSB_b4": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=nounits_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_Emissive_b24': + dims=("_rows", "_cols")), + "Data/EV_250_Emissive_b24": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=radunits_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_Emissive_b25': + dims=("_rows", "_cols")), + "Data/EV_250_Emissive_b25": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=radunits_attrs, - dims=('_rows', '_cols')), + dims=("_rows", "_cols")), } return data def _get_1km_data(num_scans, rows_per_scan, num_cols): data = { - 'Data/EV_1KM_LL': + "Data/EV_1KM_LL": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'Slope': np.array([1.]), 'Intercept': np.array([0.]), - 'FillValue': 
65535, - 'units': 'NO', - 'valid_range': [0, 4095], - 'long_name': b'1km Earth View Science Data', + "Slope": np.array([1.]), "Intercept": np.array([0.]), + "FillValue": 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"1km Earth View Science Data", }, - dims=('_rows', '_cols')), - 'Data/EV_1KM_RefSB': + dims=("_rows", "_cols")), + "Data/EV_1KM_RefSB": xr.DataArray( da.ones((15, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'Slope': np.array([1.] * 15), 'Intercept': np.array([0.] * 15), - 'FillValue': 65535, - 'units': 'NO', - 'valid_range': [0, 4095], - 'long_name': b'1km Earth View Science Data', + "Slope": np.array([1.] * 15), "Intercept": np.array([0.] * 15), + "FillValue": 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"1km Earth View Science Data", }, - dims=('_ref_bands', '_rows', '_cols')), - 'Data/EV_1KM_Emissive': + dims=("_ref_bands", "_rows", "_cols")), + "Data/EV_1KM_Emissive": xr.DataArray( da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'Slope': np.array([1.] * 4), 'Intercept': np.array([0.] * 4), - 'FillValue': 65535, - 'units': 'mW/ (m2 cm-1 sr)', - 'valid_range': [0, 25000], - 'long_name': b'1km Emissive Bands Earth View ' - b'Science Data', + "Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), + "FillValue": 65535, + "units": "mW/ (m2 cm-1 sr)", + "valid_range": [0, 25000], + "long_name": b"1km Emissive Bands Earth View " + b"Science Data", }, - dims=('_ir_bands', '_rows', '_cols')), - 'Data/EV_250_Aggr.1KM_RefSB': + dims=("_ir_bands", "_rows", "_cols")), + "Data/EV_250_Aggr.1KM_RefSB": xr.DataArray( da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'Slope': np.array([1.] * 4), 'Intercept': np.array([0.] * 4), - 'FillValue': 65535, - 'units': 'NO', - 'valid_range': [0, 4095], - 'long_name': b'250m Reflective Bands Earth View ' - b'Science Data Aggregated to 1 km' + "Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), + "FillValue": 65535, + "units": "NO", + "valid_range": [0, 4095], + "long_name": b"250m Reflective Bands Earth View " + b"Science Data Aggregated to 1 km" }, - dims=('_ref250_bands', '_rows', '_cols')), - 'Data/EV_250_Aggr.1KM_Emissive': + dims=("_ref250_bands", "_rows", "_cols")), + "Data/EV_250_Aggr.1KM_Emissive": xr.DataArray( da.ones((2, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'Slope': np.array([1.] * 2), 'Intercept': np.array([0.] * 2), - 'FillValue': 65535, - 'units': 'mW/ (m2 cm-1 sr)', - 'valid_range': [0, 4095], - 'long_name': b'250m Emissive Bands Earth View ' - b'Science Data Aggregated to 1 km' + "Slope": np.array([1.] * 2), "Intercept": np.array([0.] 
* 2), + "FillValue": 65535, + "units": "mW/ (m2 cm-1 sr)", + "valid_range": [0, 4095], + "long_name": b"250m Emissive Bands Earth View " + b"Science Data Aggregated to 1 km" }, - dims=('_ir250_bands', '_rows', '_cols')), + dims=("_ir250_bands", "_rows", "_cols")), } return data def _get_250m_ll_data(num_scans, rows_per_scan, num_cols): # Set some default attributes - def_attrs = {'FillValue': 65535, - 'valid_range': [0, 4095], - 'Slope': np.array([1.]), 'Intercept': np.array([0.]), - 'long_name': b'250m Earth View Science Data', - 'units': 'mW/ (m2 cm-1 sr)', + def_attrs = {"FillValue": 65535, + "valid_range": [0, 4095], + "Slope": np.array([1.]), "Intercept": np.array([0.]), + "long_name": b"250m Earth View Science Data", + "units": "mW/ (m2 cm-1 sr)", } data = { - 'Data/EV_250_Emissive_b6': + "Data/EV_250_Emissive_b6": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=def_attrs, - dims=('_rows', '_cols')), - 'Data/EV_250_Emissive_b7': + dims=("_rows", "_cols")), + "Data/EV_250_Emissive_b7": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs=def_attrs, - dims=('_rows', '_cols')), + dims=("_rows", "_cols")), } return data def _get_geo_data(num_scans, rows_per_scan, num_cols, prefix): geo = { - prefix + 'Longitude': + prefix + "Longitude": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ - 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1), - 'units': 'degree', - 'valid_range': [-90, 90], + "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1), + "units": "degree", + "valid_range": [-90, 90], }, - dims=('_rows', '_cols')), - prefix + 'Latitude': + dims=("_rows", "_cols")), + prefix + "Latitude": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ - 'Slope': np.array([1.] * 1), 'Intercept': np.array([0.] * 1), - 'units': 'degree', - 'valid_range': [-180, 180], + "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1), + "units": "degree", + "valid_range": [-180, 180], }, - dims=('_rows', '_cols')), - prefix + 'SensorZenith': + dims=("_rows", "_cols")), + prefix + "SensorZenith": xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ - 'Slope': np.array([.01] * 1), 'Intercept': np.array([0.] * 1), - 'units': 'degree', - 'valid_range': [0, 28000], + "Slope": np.array([.01] * 1), "Intercept": np.array([0.] 
* 1), + "units": "degree", + "valid_range": [0, 28000], }, - dims=('_rows', '_cols')), + dims=("_rows", "_cols")), } return geo @@ -225,15 +225,15 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): @property def _rows_per_scan(self): - return self.filetype_info.get('rows_per_scan', 10) + return self.filetype_info.get("rows_per_scan", 10) def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" global_attrs = { - '/attr/Observing Beginning Date': '2019-01-01', - '/attr/Observing Ending Date': '2019-01-01', - '/attr/Observing Beginning Time': '18:27:39.720', - '/attr/Observing Ending Time': '18:38:36.728', + "/attr/Observing Beginning Date": "2019-01-01", + "/attr/Observing Ending Date": "2019-01-01", + "/attr/Observing Beginning Time": "18:27:39.720", + "/attr/Observing Ending Time": "18:38:36.728", } global_attrs = self._set_sensor_attrs(global_attrs) @@ -247,12 +247,12 @@ def get_test_content(self, filename, filename_info, filetype_info): return test_content def _set_sensor_attrs(self, global_attrs): - if 'mersi2_l1b' in self.filetype_info['file_type']: - global_attrs['/attr/Satellite Name'] = 'FY-3D' - global_attrs['/attr/Sensor Identification Code'] = 'MERSI' - elif 'mersi_ll' in self.filetype_info['file_type']: - global_attrs['/attr/Satellite Name'] = 'FY-3E' - global_attrs['/attr/Sensor Identification Code'] = 'MERSI LL' + if "mersi2_l1b" in self.filetype_info["file_type"]: + global_attrs["/attr/Satellite Name"] = "FY-3D" + global_attrs["/attr/Sensor Identification Code"] = "MERSI" + elif "mersi_ll" in self.filetype_info["file_type"]: + global_attrs["/attr/Satellite Name"] = "FY-3E" + global_attrs["/attr/Sensor Identification Code"] = "MERSI LL" return global_attrs def _get_data_file_content(self): @@ -272,7 +272,7 @@ def _add_band_data_file_content(self): num_scans = self.num_scans rows_per_scan = self._rows_per_scan is_mersi2 = self.filetype_info["file_type"].startswith("mersi2_") - is_1km = "_1000" in self.filetype_info['file_type'] + is_1km = "_1000" in self.filetype_info["file_type"] data_func = _get_1km_data if is_1km else (_get_250m_data if is_mersi2 else _get_250m_ll_data) return data_func(num_scans, rows_per_scan, num_cols) @@ -280,12 +280,12 @@ def _add_tbb_coefficients(self, global_attrs): if not self.filetype_info["file_type"].startswith("mersi2_"): return - if "_1000" in self.filetype_info['file_type']: - global_attrs['/attr/TBB_Trans_Coefficient_A'] = np.array([1.0] * 6) - global_attrs['/attr/TBB_Trans_Coefficient_B'] = np.array([0.0] * 6) + if "_1000" in self.filetype_info["file_type"]: + global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([1.0] * 6) + global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6) else: - global_attrs['/attr/TBB_Trans_Coefficient_A'] = np.array([0.0] * 6) - global_attrs['/attr/TBB_Trans_Coefficient_B'] = np.array([0.0] * 6) + global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([0.0] * 6) + global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6) @property def _num_cols_for_file_type(self): @@ -298,18 +298,18 @@ def _geo_prefix_for_file_type(self): def _test_helper(res): """Remove test code duplication.""" - assert (2 * 40, 2048 * 2) == res['1'].shape - assert 'reflectance' == res['1'].attrs['calibration'] - assert '%' == res['1'].attrs['units'] - assert (2 * 40, 2048 * 2) == res['2'].shape - assert 'reflectance' == res['2'].attrs['calibration'] - assert '%' == res['2'].attrs['units'] - assert (2 * 40, 2048 * 2) == res['3'].shape - assert 'reflectance' == 
res['3'].attrs['calibration'] - assert '%' == res['3'].attrs['units'] - assert (2 * 40, 2048 * 2) == res['4'].shape - assert 'reflectance' == res['4'].attrs['calibration'] - assert '%' == res['4'].attrs['units'] + assert (2 * 40, 2048 * 2) == res["1"].shape + assert "reflectance" == res["1"].attrs["calibration"] + assert "%" == res["1"].attrs["units"] + assert (2 * 40, 2048 * 2) == res["2"].shape + assert "reflectance" == res["2"].attrs["calibration"] + assert "%" == res["2"].attrs["units"] + assert (2 * 40, 2048 * 2) == res["3"].shape + assert "reflectance" == res["3"].attrs["calibration"] + assert "%" == res["3"].attrs["units"] + assert (2 * 40, 2048 * 2) == res["4"].shape + assert "reflectance" == res["4"].attrs["calibration"] + assert "%" == res["4"].attrs["units"] class MERSIL1BTester: @@ -319,9 +319,9 @@ def setup_method(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.mersi_l1b import MERSIL1B - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(MERSIL1B, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(MERSIL1B, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -334,8 +334,8 @@ class TestMERSI2L1B(MERSIL1BTester): """Test the FY3D MERSI2 L1B reader.""" yaml_file = "mersi2_l1b.yaml" - filenames_1000m = ['tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF'] - filenames_250m = ['tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF', 'tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF'] + filenames_1000m = ["tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF", "tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF"] + filenames_250m = ["tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF", "tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF"] filenames_all = filenames_1000m + filenames_250m def test_all_resolutions(self): @@ -355,8 +355,8 @@ def test_all_resolutions(self): # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids - for band_name in ('1', '2', '3', '4', '24', '25'): - if band_name in ('24', '25'): + for band_name in ("1", "2", "3", "4", "24", "25"): + if band_name in ("24", "25"): # don't know how to get radiance for IR bands num_results = 2 else: @@ -370,20 +370,20 @@ def test_all_resolutions(self): num_results=num_results, best=False) assert num_results == len(res) - res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25']) + res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) assert len(res) == 8 - assert res['5'].shape == (2 * 10, 2048) - assert res['5'].attrs['calibration'] == 'reflectance' - assert res['5'].attrs['units'] == '%' - assert res['20'].shape == (2 * 10, 2048) - assert res['20'].attrs['calibration'] == 'brightness_temperature' - assert res['20'].attrs['units'] == 'K' - assert res['24'].shape == (2 * 40, 2048 * 2) - assert res['24'].attrs['calibration'] == 'brightness_temperature' - assert res['24'].attrs['units'] == 'K' - assert res['25'].shape == (2 * 40, 2048 * 2) - assert res['25'].attrs['calibration'] == 'brightness_temperature' - assert res['25'].attrs['units'] == 'K' + assert res["5"].shape == (2 * 10, 2048) + assert res["5"].attrs["calibration"] == "reflectance" + assert res["5"].attrs["units"] == "%" + assert res["20"].shape == 
(2 * 10, 2048) + assert res["20"].attrs["calibration"] == "brightness_temperature" + assert res["20"].attrs["units"] == "K" + assert res["24"].shape == (2 * 40, 2048 * 2) + assert res["24"].attrs["calibration"] == "brightness_temperature" + assert res["24"].attrs["units"] == "K" + assert res["25"].shape == (2 * 40, 2048 * 2) + assert res["25"].attrs["calibration"] == "brightness_temperature" + assert res["25"].attrs["units"] == "K" def test_counts_calib(self): """Test loading data at counts calibration.""" @@ -398,43 +398,43 @@ def test_counts_calib(self): assert reader.file_handlers ds_ids = [] - for band_name in ['1', '2', '3', '4', '5', '20', '24', '25']: - ds_ids.append(make_dataid(name=band_name, calibration='counts')) - ds_ids.append(make_dataid(name='satellite_zenith_angle')) + for band_name in ["1", "2", "3", "4", "5", "20", "24", "25"]: + ds_ids.append(make_dataid(name=band_name, calibration="counts")) + ds_ids.append(make_dataid(name="satellite_zenith_angle")) res = reader.load(ds_ids) assert len(res) == 9 - assert res['1'].shape == (2 * 40, 2048 * 2) - assert res['1'].attrs['calibration'] == 'counts' - assert res['1'].dtype == np.uint16 - assert res['1'].attrs['units'] == '1' - assert res['2'].shape == (2 * 40, 2048 * 2) - assert res['2'].attrs['calibration'] == 'counts' - assert res['2'].dtype == np.uint16 - assert res['2'].attrs['units'] == '1' - assert res['3'].shape == (2 * 40, 2048 * 2) - assert res['3'].attrs['calibration'] == 'counts' - assert res['3'].dtype == np.uint16 - assert res['3'].attrs['units'] == '1' - assert res['4'].shape == (2 * 40, 2048 * 2) - assert res['4'].attrs['calibration'] == 'counts' - assert res['4'].dtype == np.uint16 - assert res['4'].attrs['units'] == '1' - assert res['5'].shape == (2 * 10, 2048) - assert res['5'].attrs['calibration'] == 'counts' - assert res['5'].dtype == np.uint16 - assert res['5'].attrs['units'] == '1' - assert res['20'].shape == (2 * 10, 2048) - assert res['20'].attrs['calibration'] == 'counts' - assert res['20'].dtype == np.uint16 - assert res['20'].attrs['units'] == '1' - assert res['24'].shape == (2 * 40, 2048 * 2) - assert res['24'].attrs['calibration'] == 'counts' - assert res['24'].dtype == np.uint16 - assert res['24'].attrs['units'] == '1' - assert res['25'].shape == (2 * 40, 2048 * 2) - assert res['25'].attrs['calibration'] == 'counts' - assert res['25'].dtype == np.uint16 - assert res['25'].attrs['units'] == '1' + assert res["1"].shape == (2 * 40, 2048 * 2) + assert res["1"].attrs["calibration"] == "counts" + assert res["1"].dtype == np.uint16 + assert res["1"].attrs["units"] == "1" + assert res["2"].shape == (2 * 40, 2048 * 2) + assert res["2"].attrs["calibration"] == "counts" + assert res["2"].dtype == np.uint16 + assert res["2"].attrs["units"] == "1" + assert res["3"].shape == (2 * 40, 2048 * 2) + assert res["3"].attrs["calibration"] == "counts" + assert res["3"].dtype == np.uint16 + assert res["3"].attrs["units"] == "1" + assert res["4"].shape == (2 * 40, 2048 * 2) + assert res["4"].attrs["calibration"] == "counts" + assert res["4"].dtype == np.uint16 + assert res["4"].attrs["units"] == "1" + assert res["5"].shape == (2 * 10, 2048) + assert res["5"].attrs["calibration"] == "counts" + assert res["5"].dtype == np.uint16 + assert res["5"].attrs["units"] == "1" + assert res["20"].shape == (2 * 10, 2048) + assert res["20"].attrs["calibration"] == "counts" + assert res["20"].dtype == np.uint16 + assert res["20"].attrs["units"] == "1" + assert res["24"].shape == (2 * 40, 2048 * 2) + assert res["24"].attrs["calibration"] 
== "counts" + assert res["24"].dtype == np.uint16 + assert res["24"].attrs["units"] == "1" + assert res["25"].shape == (2 * 40, 2048 * 2) + assert res["25"].attrs["calibration"] == "counts" + assert res["25"].dtype == np.uint16 + assert res["25"].attrs["units"] == "1" def test_rad_calib(self): """Test loading data at radiance calibration.""" @@ -449,25 +449,25 @@ def test_rad_calib(self): assert reader.file_handlers ds_ids = [] - for band_name in ['1', '2', '3', '4', '5']: - ds_ids.append(make_dataid(name=band_name, calibration='radiance')) + for band_name in ["1", "2", "3", "4", "5"]: + ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == 5 - assert res['1'].shape == (2 * 40, 2048 * 2) - assert res['1'].attrs['calibration'] == 'radiance' - assert res['1'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['2'].shape == (2 * 40, 2048 * 2) - assert res['2'].attrs['calibration'] == 'radiance' - assert res['2'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['3'].shape == (2 * 40, 2048 * 2) - assert res['3'].attrs['calibration'] == 'radiance' - assert res['3'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['4'].shape == (2 * 40, 2048 * 2) - assert res['4'].attrs['calibration'] == 'radiance' - assert res['4'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['5'].shape == (2 * 10, 2048) - assert res['5'].attrs['calibration'] == 'radiance' - assert res['5'].attrs['units'] == 'mW/ (m2 cm-1 sr)' + assert res["1"].shape == (2 * 40, 2048 * 2) + assert res["1"].attrs["calibration"] == "radiance" + assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["2"].shape == (2 * 40, 2048 * 2) + assert res["2"].attrs["calibration"] == "radiance" + assert res["2"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["3"].shape == (2 * 40, 2048 * 2) + assert res["3"].attrs["calibration"] == "radiance" + assert res["3"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["4"].shape == (2 * 40, 2048 * 2) + assert res["4"].attrs["calibration"] == "radiance" + assert res["4"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["5"].shape == (2 * 10, 2048) + assert res["5"].attrs["calibration"] == "radiance" + assert res["5"].attrs["units"] == "mW/ (m2 cm-1 sr)" def test_1km_resolutions(self): """Test loading data when only 1km resolutions are available.""" @@ -486,8 +486,8 @@ def test_1km_resolutions(self): # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids - for band_name in ('1', '2', '3', '4', '24', '25'): - if band_name in ('24', '25'): + for band_name in ("1", "2", "3", "4", "24", "25"): + if band_name in ("24", "25"): # don't know how to get radiance for IR bands num_results = 2 else: @@ -500,32 +500,32 @@ def test_1km_resolutions(self): num_results=num_results, best=False) assert num_results == len(res) - res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25']) + res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) assert len(res) == 8 - assert res['1'].shape == (2 * 10, 2048) - assert res['1'].attrs['calibration'] == 'reflectance' - assert res['1'].attrs['units'] == '%' - assert res['2'].shape == (2 * 10, 2048) - assert res['2'].attrs['calibration'] == 'reflectance' - assert res['2'].attrs['units'] == '%' - assert res['3'].shape == (2 * 10, 2048) - assert res['3'].attrs['calibration'] == 'reflectance' - assert res['3'].attrs['units'] == '%' - assert res['4'].shape == (2 * 10, 2048) - assert res['4'].attrs['calibration'] == 'reflectance' - assert res['4'].attrs['units'] == '%' - 
assert res['5'].shape == (2 * 10, 2048) - assert res['5'].attrs['calibration'] == 'reflectance' - assert res['5'].attrs['units'] == '%' - assert res['20'].shape == (2 * 10, 2048) - assert res['20'].attrs['calibration'] == 'brightness_temperature' - assert res['20'].attrs['units'] == 'K' - assert res['24'].shape == (2 * 10, 2048) - assert res['24'].attrs['calibration'] == 'brightness_temperature' - assert res['24'].attrs['units'] == 'K' - assert res['25'].shape == (2 * 10, 2048) - assert res['25'].attrs['calibration'] == 'brightness_temperature' - assert res['25'].attrs['units'] == 'K' + assert res["1"].shape == (2 * 10, 2048) + assert res["1"].attrs["calibration"] == "reflectance" + assert res["1"].attrs["units"] == "%" + assert res["2"].shape == (2 * 10, 2048) + assert res["2"].attrs["calibration"] == "reflectance" + assert res["2"].attrs["units"] == "%" + assert res["3"].shape == (2 * 10, 2048) + assert res["3"].attrs["calibration"] == "reflectance" + assert res["3"].attrs["units"] == "%" + assert res["4"].shape == (2 * 10, 2048) + assert res["4"].attrs["calibration"] == "reflectance" + assert res["4"].attrs["units"] == "%" + assert res["5"].shape == (2 * 10, 2048) + assert res["5"].attrs["calibration"] == "reflectance" + assert res["5"].attrs["units"] == "%" + assert res["20"].shape == (2 * 10, 2048) + assert res["20"].attrs["calibration"] == "brightness_temperature" + assert res["20"].attrs["units"] == "K" + assert res["24"].shape == (2 * 10, 2048) + assert res["24"].attrs["calibration"] == "brightness_temperature" + assert res["24"].attrs["units"] == "K" + assert res["25"].shape == (2 * 10, 2048) + assert res["25"].attrs["calibration"] == "brightness_temperature" + assert res["25"].attrs["units"] == "K" def test_250_resolutions(self): """Test loading data when only 250m resolutions are available.""" @@ -544,8 +544,8 @@ def test_250_resolutions(self): # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids - for band_name in ('1', '2', '3', '4', '24', '25'): - if band_name in ('24', '25'): + for band_name in ("1", "2", "3", "4", "24", "25"): + if band_name in ("24", "25"): # don't know how to get radiance for IR bands num_results = 2 else: @@ -558,27 +558,27 @@ def test_250_resolutions(self): with pytest.raises(KeyError): get_key(ds_id, available_datasets, num_results=num_results, best=False) - res = reader.load(['1', '2', '3', '4', '5', '20', '24', '25']) + res = reader.load(["1", "2", "3", "4", "5", "20", "24", "25"]) assert len(res) == 6 with pytest.raises(KeyError): - res.__getitem__('5') + res.__getitem__("5") with pytest.raises(KeyError): - res.__getitem__('20') + res.__getitem__("20") _test_helper(res) - assert res['24'].shape == (2 * 40, 2048 * 2) - assert res['24'].attrs['calibration'] == 'brightness_temperature' - assert res['24'].attrs['units'] == 'K' - assert res['25'].shape == (2 * 40, 2048 * 2) - assert res['25'].attrs['calibration'] == 'brightness_temperature' - assert res['25'].attrs['units'] == 'K' + assert res["24"].shape == (2 * 40, 2048 * 2) + assert res["24"].attrs["calibration"] == "brightness_temperature" + assert res["24"].attrs["units"] == "K" + assert res["25"].shape == (2 * 40, 2048 * 2) + assert res["25"].attrs["calibration"] == "brightness_temperature" + assert res["25"].attrs["units"] == "K" class TestMERSILLL1B(MERSIL1BTester): """Test the FY3E MERSI-LL L1B reader.""" yaml_file = "mersi_ll_l1b.yaml" - filenames_1000m = ['FY3E_MERSI_GRAN_L1_20230410_1910_1000M_V0.HDF', 'FY3E_MERSI_GRAN_L1_20230410_1910_GEO1K_V0.HDF'] 
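The fake-handler setup used throughout these reader tests hinges on patching the reader class's `__bases__` (see the stackoverflow link in `setup_method` above) so the real HDF5/NetCDF file handler base is replaced by a fake that serves in-memory content. A self-contained sketch of that trick follows; `Reader`, `RealBase` and `FakeBase` are illustrative names, not Satpy classes.

from unittest import mock


class RealBase:
    """Stand-in for a real file handler base that would open files on disk."""

    def __init__(self, filename, filename_info, filetype_info):
        raise IOError("would try to open a real file")


class FakeBase:
    """Stand-in for a fake handler that serves canned in-memory content."""

    def __init__(self, filename, filename_info, filetype_info):
        self.file_content = {"/attr/Sensor Identification Code": "MERSI"}


class Reader(RealBase):
    """Stand-in for the reader class whose base class is swapped in tests."""

    def sensor(self):
        return self.file_content["/attr/Sensor Identification Code"]


def test_reader_with_fake_base():
    p = mock.patch.object(Reader, "__bases__", (FakeBase,))
    p.start()
    p.is_local = True  # so p.stop() restores __bases__ instead of deleting it
    try:
        assert Reader("dummy.HDF", {}, {}).sensor() == "MERSI"
    finally:
        p.stop()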
- filenames_250m = ['FY3E_MERSI_GRAN_L1_20230410_1910_0250M_V0.HDF', 'FY3E_MERSI_GRAN_L1_20230410_1910_GEOQK_V0.HDF'] + filenames_1000m = ["FY3E_MERSI_GRAN_L1_20230410_1910_1000M_V0.HDF", "FY3E_MERSI_GRAN_L1_20230410_1910_GEO1K_V0.HDF"] + filenames_250m = ["FY3E_MERSI_GRAN_L1_20230410_1910_0250M_V0.HDF", "FY3E_MERSI_GRAN_L1_20230410_1910_GEOQK_V0.HDF"] filenames_all = filenames_1000m + filenames_250m def test_all_resolutions(self): @@ -598,7 +598,7 @@ def test_all_resolutions(self): # - Bands 1-4 (visible) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids - for band_name in ('6', '7'): + for band_name in ("6", "7"): num_results = 2 ds_id = make_dataid(name=band_name, resolution=250) res = get_key(ds_id, available_datasets, @@ -609,17 +609,17 @@ def test_all_resolutions(self): num_results=num_results, best=False) assert num_results == len(res) - res = reader.load(['1', '2', '4', '7']) + res = reader.load(["1", "2", "4", "7"]) assert len(res) == 4 - assert res['4'].shape == (2 * 10, 2048) - assert res['1'].attrs['calibration'] == 'radiance' - assert res['1'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['2'].shape == (2 * 10, 2048) - assert res['2'].attrs['calibration'] == 'brightness_temperature' - assert res['2'].attrs['units'] == 'K' - assert res['7'].shape == (2 * 40, 2048 * 2) - assert res['7'].attrs['calibration'] == 'brightness_temperature' - assert res['7'].attrs['units'] == 'K' + assert res["4"].shape == (2 * 10, 2048) + assert res["1"].attrs["calibration"] == "radiance" + assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["2"].shape == (2 * 10, 2048) + assert res["2"].attrs["calibration"] == "brightness_temperature" + assert res["2"].attrs["units"] == "K" + assert res["7"].shape == (2 * 40, 2048 * 2) + assert res["7"].attrs["calibration"] == "brightness_temperature" + assert res["7"].attrs["units"] == "K" def test_rad_calib(self): """Test loading data at radiance calibration.""" @@ -634,25 +634,25 @@ def test_rad_calib(self): assert reader.file_handlers ds_ids = [] - for band_name in ['1', '3', '4', '6', '7']: - ds_ids.append(make_dataid(name=band_name, calibration='radiance')) + for band_name in ["1", "3", "4", "6", "7"]: + ds_ids.append(make_dataid(name=band_name, calibration="radiance")) res = reader.load(ds_ids) assert len(res) == 5 - assert res['1'].shape == (2 * 10, 2048) - assert res['1'].attrs['calibration'] == 'radiance' - assert res['1'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['3'].shape == (2 * 10, 2048) - assert res['3'].attrs['calibration'] == 'radiance' - assert res['3'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['4'].shape == (2 * 10, 2048) - assert res['4'].attrs['calibration'] == 'radiance' - assert res['4'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['6'].shape == (2 * 40, 2048 * 2) - assert res['6'].attrs['calibration'] == 'radiance' - assert res['6'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['7'].shape == (2 * 40, 2048 * 2) - assert res['7'].attrs['calibration'] == 'radiance' - assert res['7'].attrs['units'] == 'mW/ (m2 cm-1 sr)' + assert res["1"].shape == (2 * 10, 2048) + assert res["1"].attrs["calibration"] == "radiance" + assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["3"].shape == (2 * 10, 2048) + assert res["3"].attrs["calibration"] == "radiance" + assert res["3"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["4"].shape == (2 * 10, 2048) + assert res["4"].attrs["calibration"] == "radiance" + assert res["4"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert 
res["6"].shape == (2 * 40, 2048 * 2) + assert res["6"].attrs["calibration"] == "radiance" + assert res["6"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["7"].shape == (2 * 40, 2048 * 2) + assert res["7"].attrs["calibration"] == "radiance" + assert res["7"].attrs["units"] == "mW/ (m2 cm-1 sr)" def test_1km_resolutions(self): """Test loading data when only 1km resolutions are available.""" @@ -671,8 +671,8 @@ def test_1km_resolutions(self): # - Band 6-7 (IR) # - Bands 24-25 (IR) available_datasets = reader.available_dataset_ids - for band_name in ('1', '2', '3', '4', '6', '7'): - if band_name == '1': + for band_name in ("1", "2", "3", "4", "6", "7"): + if band_name == "1": # don't know how to get anything apart from radiance for LL band num_results = 1 else: @@ -683,31 +683,31 @@ def test_1km_resolutions(self): ds_id = make_dataid(name=band_name, resolution=1000) res = get_key(ds_id, available_datasets, num_results=num_results, best=False) - if band_name == '1': + if band_name == "1": assert num_results == len([res]) else: assert num_results == len(res) - res = reader.load(['1', '2', '3', '5', '6', '7']) + res = reader.load(["1", "2", "3", "5", "6", "7"]) assert len(res) == 6 - assert res['1'].shape == (2 * 10, 2048) - assert 'radiance' == res['1'].attrs['calibration'] - assert res['1'].attrs['units'] == 'mW/ (m2 cm-1 sr)' - assert res['2'].shape == (2 * 10, 2048) - assert 'brightness_temperature' == res['2'].attrs['calibration'] - assert res['2'].attrs['units'] == 'K' - assert res['3'].shape == (2 * 10, 2048) - assert 'brightness_temperature' == res['3'].attrs['calibration'] - assert res['3'].attrs['units'] == 'K' - assert res['5'].shape == (2 * 10, 2048) - assert 'brightness_temperature' == res['5'].attrs['calibration'] - assert res['5'].attrs['units'] == 'K' - assert res['6'].shape == (2 * 10, 2048) - assert 'brightness_temperature' == res['6'].attrs['calibration'] - assert res['6'].attrs['units'] == 'K' - assert res['7'].shape == (2 * 10, 2048) - assert 'brightness_temperature' == res['7'].attrs['calibration'] - assert res['7'].attrs['units'] == 'K' + assert res["1"].shape == (2 * 10, 2048) + assert "radiance" == res["1"].attrs["calibration"] + assert res["1"].attrs["units"] == "mW/ (m2 cm-1 sr)" + assert res["2"].shape == (2 * 10, 2048) + assert "brightness_temperature" == res["2"].attrs["calibration"] + assert res["2"].attrs["units"] == "K" + assert res["3"].shape == (2 * 10, 2048) + assert "brightness_temperature" == res["3"].attrs["calibration"] + assert res["3"].attrs["units"] == "K" + assert res["5"].shape == (2 * 10, 2048) + assert "brightness_temperature" == res["5"].attrs["calibration"] + assert res["5"].attrs["units"] == "K" + assert res["6"].shape == (2 * 10, 2048) + assert "brightness_temperature" == res["6"].attrs["calibration"] + assert res["6"].attrs["units"] == "K" + assert res["7"].shape == (2 * 10, 2048) + assert "brightness_temperature" == res["7"].attrs["calibration"] + assert res["7"].attrs["units"] == "K" def test_250_resolutions(self): """Test loading data when only 250m resolutions are available.""" @@ -725,7 +725,7 @@ def test_250_resolutions(self): # Verify that we have multiple resolutions for: # - Bands 6-7 available_datasets = reader.available_dataset_ids - for band_name in ('6', '7'): + for band_name in ("6", "7"): num_results = 2 ds_id = make_dataid(name=band_name, resolution=250) res = get_key(ds_id, available_datasets, @@ -735,13 +735,13 @@ def test_250_resolutions(self): with pytest.raises(KeyError): get_key(ds_id, available_datasets, 
num_results=num_results, best=False) - res = reader.load(['1', '6', '7']) + res = reader.load(["1", "6", "7"]) assert 2 == len(res) with pytest.raises(KeyError): - res.__getitem__('1') - assert (2 * 40, 2048 * 2) == res['6'].shape - assert 'brightness_temperature' == res['6'].attrs['calibration'] - assert 'K' == res['6'].attrs['units'] - assert (2 * 40, 2048 * 2) == res['7'].shape - assert 'brightness_temperature' == res['7'].attrs['calibration'] - assert 'K' == res['7'].attrs['units'] + res.__getitem__("1") + assert (2 * 40, 2048 * 2) == res["6"].shape + assert "brightness_temperature" == res["6"].attrs["calibration"] + assert "K" == res["6"].attrs["units"] + assert (2 * 40, 2048 * 2) == res["7"].shape + assert "brightness_temperature" == res["7"].attrs["calibration"] + assert "K" == res["7"].attrs["units"] diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py index cafadf9e77..4083f7de00 100644 --- a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py +++ b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py @@ -39,9 +39,9 @@ DEFAULT_FILE_DATE_DATA = np.clip(DEFAULT_FILE_FLOAT_DATA, 0, 1049) DEFAULT_FILE_UBYTE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1], dtype=np.ubyte) -float_variables = ['tpwGrid', 'tpwGridPrior', 'tpwGridSubseq', 'footGridPrior', 'footGridSubseq'] -date_variables = ['timeAwayGridPrior', 'timeAwayGridSubseq'] -ubyte_variables = ['satGridPrior', 'satGridSubseq'] +float_variables = ["tpwGrid", "tpwGridPrior", "tpwGridSubseq", "footGridPrior", "footGridSubseq"] +date_variables = ["timeAwayGridPrior", "timeAwayGridSubseq"] +ubyte_variables = ["satGridPrior", "satGridSubseq"] file_content_attr = dict() @@ -50,57 +50,57 @@ class FakeNetCDF4FileHandlerMimicLow(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content for lower resolution files.""" - dt_s = filename_info.get('start_time', DEFAULT_DATE) - dt_e = filename_info.get('end_time', DEFAULT_DATE) + dt_s = filename_info.get("start_time", DEFAULT_DATE) + dt_e = filename_info.get("end_time", DEFAULT_DATE) - if filetype_info['file_type'] == 'mimicTPW2_comp': + if filetype_info["file_type"] == "mimicTPW2_comp": file_content = { - '/attr/start_time': dt_s.strftime('%Y%m%d.%H%M%S'), - '/attr/end_time': dt_e.strftime('%Y%m%d.%H%M%S'), - '/attr/platform_shortname': 'aggregated microwave', - '/attr/sensor': 'mimic', + "/attr/start_time": dt_s.strftime("%Y%m%d.%H%M%S"), + "/attr/end_time": dt_e.strftime("%Y%m%d.%H%M%S"), + "/attr/platform_shortname": "aggregated microwave", + "/attr/sensor": "mimic", } - file_content['latArr'] = DEFAULT_LAT - file_content['latArr/shape'] = (DEFAULT_FILE_SHAPE[0],) - file_content['latArr/attr/units'] = 'degress_north' + file_content["latArr"] = DEFAULT_LAT + file_content["latArr/shape"] = (DEFAULT_FILE_SHAPE[0],) + file_content["latArr/attr/units"] = "degress_north" - file_content['lonArr'] = DEFAULT_LON - file_content['lonArr/shape'] = (DEFAULT_FILE_SHAPE[1],) - file_content['lonArr/attr/units'] = 'degrees_east' + file_content["lonArr"] = DEFAULT_LON + file_content["lonArr/shape"] = (DEFAULT_FILE_SHAPE[1],) + file_content["lonArr/attr/units"] = "degrees_east" - file_content['/dimension/lat'] = DEFAULT_FILE_SHAPE[0] - file_content['/dimension/lon'] = DEFAULT_FILE_SHAPE[1] + file_content["/dimension/lat"] = DEFAULT_FILE_SHAPE[0] + file_content["/dimension/lon"] = DEFAULT_FILE_SHAPE[1] for float_var in float_variables: file_content[float_var] = 
DEFAULT_FILE_FLOAT_DATA.reshape(DEFAULT_FILE_SHAPE) - file_content['{}/shape'.format(float_var)] = DEFAULT_FILE_SHAPE + file_content["{}/shape".format(float_var)] = DEFAULT_FILE_SHAPE file_content_attr[float_var] = {"units": "mm"} for date_var in date_variables: file_content[date_var] = DEFAULT_FILE_DATE_DATA.reshape(DEFAULT_FILE_SHAPE) - file_content['{}/shape'.format(date_var)] = DEFAULT_FILE_SHAPE + file_content["{}/shape".format(date_var)] = DEFAULT_FILE_SHAPE file_content_attr[date_var] = {"units": "minutes"} for ubyte_var in ubyte_variables: file_content[ubyte_var] = DEFAULT_FILE_UBYTE_DATA.reshape(DEFAULT_FILE_SHAPE) - file_content['{}/shape'.format(ubyte_var)] = DEFAULT_FILE_SHAPE + file_content["{}/shape".format(ubyte_var)] = DEFAULT_FILE_SHAPE file_content_attr[ubyte_var] = {"source_key": "Key: 0: None, 1: NOAA-N, 2: NOAA-P, 3: Metop-A, \ 4: Metop-B, 5: SNPP, 6: SSMI-17, 7: SSMI-18"} # convert to xarrays for key, val in file_content.items(): - if key == 'lonArr' or key == 'latArr': + if key == "lonArr" or key == "latArr": file_content[key] = xr.DataArray(val) elif isinstance(val, np.ndarray): if val.ndim > 1: - file_content[key] = xr.DataArray(val, dims=('y', 'x'), attrs=file_content_attr[key]) + file_content[key] = xr.DataArray(val, dims=("y", "x"), attrs=file_content_attr[key]) else: file_content[key] = xr.DataArray(val) for key in itertools.chain(float_variables, ubyte_variables): - file_content[key].attrs['_FillValue'] = -999.0 - file_content[key].attrs['name'] = key - file_content[key].attrs['file_key'] = key - file_content[key].attrs['file_type'] = self.filetype_info['file_type'] + file_content[key].attrs["_FillValue"] = -999.0 + file_content[key].attrs["name"] = key + file_content[key].attrs["file_key"] = key + file_content[key].attrs["file_type"] = self.filetype_info["file_type"] else: - msg = 'Wrong Test Reader for file_type {}'.format(filetype_info['file_type']) + msg = "Wrong Test Reader for file_type {}".format(filetype_info["file_type"]) raise AssertionError(msg) return file_content @@ -115,9 +115,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.mimic_TPW2_nc import MimicTPW2FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(MimicTPW2FileHandler, '__bases__', (FakeNetCDF4FileHandlerMimicLow,)) + self.p = mock.patch.object(MimicTPW2FileHandler, "__bases__", (FakeNetCDF4FileHandlerMimicLow,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -130,65 +130,65 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + "comp20190619.130000.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_mimic_float(self): """Load TPW mimic float data.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + 
"comp20190619.130000.nc", ]) r.create_filehandlers(loadables) ds = r.load(float_variables) - self.assertEqual(len(ds), len(float_variables)) + assert len(ds) == len(float_variables) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') - self.assertEqual(d.attrs['sensor'], 'mimic') - self.assertEqual(d.attrs['units'], 'mm') - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + assert d.attrs["platform_shortname"] == "aggregated microwave" + assert d.attrs["sensor"] == "mimic" + assert d.attrs["units"] == "mm" + assert "area" in d.attrs + assert d.attrs["area"] is not None def test_load_mimic_timedelta(self): """Load TPW mimic timedelta data (data latency variables).""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + "comp20190619.130000.nc", ]) r.create_filehandlers(loadables) ds = r.load(date_variables) - self.assertEqual(len(ds), len(date_variables)) + assert len(ds) == len(date_variables) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') - self.assertEqual(d.attrs['sensor'], 'mimic') - self.assertEqual(d.attrs['units'], 'minutes') - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertEqual(d.dtype, DEFAULT_FILE_DTYPE) + assert d.attrs["platform_shortname"] == "aggregated microwave" + assert d.attrs["sensor"] == "mimic" + assert d.attrs["units"] == "minutes" + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert d.dtype == DEFAULT_FILE_DTYPE def test_load_mimic_ubyte(self): """Load TPW mimic sensor grids.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + "comp20190619.130000.nc", ]) r.create_filehandlers(loadables) ds = r.load(ubyte_variables) - self.assertEqual(len(ds), len(ubyte_variables)) + assert len(ds) == len(ubyte_variables) for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') - self.assertEqual(d.attrs['sensor'], 'mimic') - self.assertIn('source_key', d.attrs) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertEqual(d.dtype, np.uint8) + assert d.attrs["platform_shortname"] == "aggregated microwave" + assert d.attrs["sensor"] == "mimic" + assert "source_key" in d.attrs + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert d.dtype == np.uint8 diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py index 544c805e70..63214b0477 100644 --- a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py +++ b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py @@ -43,42 +43,42 @@ class FakeNetCDF4FileHandlerMimic(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" from xarray import DataArray - dt_s = filename_info.get('start_time', datetime(2019, 6, 19, 13, 0)) - dt_e = filename_info.get('end_time', datetime(2019, 6, 19, 13, 0)) + dt_s = filename_info.get("start_time", datetime(2019, 6, 19, 13, 0)) + dt_e 
= filename_info.get("end_time", datetime(2019, 6, 19, 13, 0)) - if filetype_info['file_type'] == 'mimicTPW2_comp': + if filetype_info["file_type"] == "mimicTPW2_comp": file_content = { - '/attr/start_time': dt_s.strftime('%Y%m%d.%H%M%S'), - '/attr/end_time': dt_e.strftime('%Y%m%d.%H%M%S'), - '/attr/platform_shortname': 'aggregated microwave', - '/attr/sensor': 'mimic', + "/attr/start_time": dt_s.strftime("%Y%m%d.%H%M%S"), + "/attr/end_time": dt_e.strftime("%Y%m%d.%H%M%S"), + "/attr/platform_shortname": "aggregated microwave", + "/attr/sensor": "mimic", } - file_content['latArr'] = DEFAULT_LAT - file_content['latArr/shape'] = (DEFAULT_FILE_SHAPE[0],) - file_content['latArr/attr/units'] = 'degress_north' + file_content["latArr"] = DEFAULT_LAT + file_content["latArr/shape"] = (DEFAULT_FILE_SHAPE[0],) + file_content["latArr/attr/units"] = "degress_north" - file_content['lonArr'] = DEFAULT_LON - file_content['lonArr/shape'] = (DEFAULT_FILE_SHAPE[1],) - file_content['lonArr/attr/units'] = 'degrees_east' + file_content["lonArr"] = DEFAULT_LON + file_content["lonArr/shape"] = (DEFAULT_FILE_SHAPE[1],) + file_content["lonArr/attr/units"] = "degrees_east" - file_content['tpwGrid'] = DEFAULT_FILE_DATA - file_content['tpwGrid/shape'] = DEFAULT_FILE_SHAPE - file_content_units['tpwGrid'] = 'mm' + file_content["tpwGrid"] = DEFAULT_FILE_DATA + file_content["tpwGrid/shape"] = DEFAULT_FILE_SHAPE + file_content_units["tpwGrid"] = "mm" - file_content['/dimension/lat'] = DEFAULT_FILE_SHAPE[0] - file_content['/dimension/lon'] = DEFAULT_FILE_SHAPE[1] + file_content["/dimension/lat"] = DEFAULT_FILE_SHAPE[0] + file_content["/dimension/lon"] = DEFAULT_FILE_SHAPE[1] # convert to xarrays for key, val in file_content.items(): - if key == 'lonArr' or key == 'latArr': + if key == "lonArr" or key == "latArr": file_content[key] = DataArray(val) elif isinstance(val, np.ndarray): if val.ndim > 1: - file_content[key] = DataArray(val, dims=('y', 'x'), attrs={"units": file_content_units[key]}) + file_content[key] = DataArray(val, dims=("y", "x"), attrs={"units": file_content_units[key]}) else: file_content[key] = DataArray(val) else: - msg = 'Wrong Test Reader for file_type {}'.format(filetype_info['file_type']) + msg = "Wrong Test Reader for file_type {}".format(filetype_info["file_type"]) raise AssertionError(msg) return file_content @@ -93,9 +93,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.mimic_TPW2_nc import MimicTPW2FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(MimicTPW2FileHandler, '__bases__', (FakeNetCDF4FileHandlerMimic,)) + self.p = mock.patch.object(MimicTPW2FileHandler, "__bases__", (FakeNetCDF4FileHandlerMimic,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -108,27 +108,27 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + "comp20190619.130000.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_mimic(self): """Load Mimic data.""" from satpy.readers import 
load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.mimic_TPW2_nc.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.mimic_TPW2_nc.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'comp20190619.130000.nc', + "comp20190619.130000.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['tpwGrid']) - self.assertEqual(len(ds), 1) + ds = r.load(["tpwGrid"]) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'aggregated microwave') - self.assertEqual(d.attrs['sensor'], 'mimic') - self.assertIn('area', d.attrs) - self.assertIn('units', d.attrs) - self.assertIsNotNone(d.attrs['area']) + assert d.attrs["platform_shortname"] == "aggregated microwave" + assert d.attrs["sensor"] == "mimic" + assert "area" in d.attrs + assert "units" in d.attrs + assert d.attrs["area"] is not None diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py index b726a519e5..69f5543411 100644 --- a/satpy/tests/reader_tests/test_mirs.py +++ b/satpy/tests/reader_tests/test_mirs.py @@ -46,17 +46,17 @@ FREQ = xr.DataArray([23.8, 31.4, 50.3, 51.76, 52.8, 53.596, 54.4, 54.94, 55.5, 57.29, 57.29, 57.29, 57.29, 57.29, 57.29, 88.2, 165.5, 183.31, 183.31, 183.31, 183.31, 183.31][:N_CHANNEL], - dims='Channel', - attrs={'description': "Central Frequencies (GHz)"}) + dims="Channel", + attrs={"description": "Central Frequencies (GHz)"}) POLO = xr.DataArray([2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, - 3, 3, 3][:N_CHANNEL], dims='Channel', - attrs={'description': "Polarizations"}) - -DS_IDS = ['RR', 'longitude', 'latitude'] -TEST_VARS = ['btemp_88v', 'btemp_165h', - 'btemp_23v', 'RR', 'Sfc_type'] -DEFAULT_UNITS = {'btemp_88v': 'K', 'btemp_165h': 'K', - 'btemp_23v': 'K', 'RR': 'mm/hr', 'Sfc_type': "1"} + 3, 3, 3][:N_CHANNEL], dims="Channel", + attrs={"description": "Polarizations"}) + +DS_IDS = ["RR", "longitude", "latitude"] +TEST_VARS = ["btemp_88v", "btemp_165h", + "btemp_23v", "RR", "Sfc_type"] +DEFAULT_UNITS = {"btemp_88v": "K", "btemp_165h": "K", + "btemp_23v": "K", "RR": "mm/hr", "Sfc_type": "1"} PLATFORM = {"M2": "metop-a", "NPP": "npp", "GPM": "gpm"} SENSOR = {"m2": "amsu-mhs", "npp": "atms", "gpm": "GPI"} @@ -96,20 +96,20 @@ def fake_coeff_from_fn(fn): coeff_str = [] for idx in range(1, N_CHANNEL + 1): nx = idx - 1 - coeff_str.append('\n') - next_line = ' {} {} {}\n'.format(idx, all_nchx[nx], ameans[nx]) + coeff_str.append("\n") + next_line = " {} {} {}\n".format(idx, all_nchx[nx], ameans[nx]) coeff_str.append(next_line) - next_line = ' {}\n'.format(" ".join([str(x) for x in locations[idx - 1]])) + next_line = " {}\n".format(" ".join([str(x) for x in locations[idx - 1]])) coeff_str.append(next_line) for fov in range(1, N_FOV+1): random_coeff = np.random.rand(all_nchx[nx]) random_coeff = np.ones(all_nchx[nx]) - str_coeff = ' '.join([str(x) for x in random_coeff]) + str_coeff = " ".join([str(x) for x in random_coeff]) random_means = np.random.uniform(261, 267, all_nchx[nx]) random_means = np.zeros(all_nchx[nx]) - str_means = ' '.join([str(x) for x in random_means]) + str_means = " ".join([str(x) for x in random_means]) error_val = np.random.uniform(0, 4) - coeffs_line = ' {:>2} {:>2} {} {} {}\n'.format(idx, fov, + coeffs_line = " {:>2} {:>2} {} {} {}\n".format(idx, fov, str_coeff, str_means, error_val) @@ -122,50 +122,50 @@ def _get_datasets_with_attributes(**kwargs): """Represent files with two resolution of variables in them (ex. 
OCEAN).""" bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL). reshape(N_SCANLINE, N_FOV, N_CHANNEL), - attrs={'long_name': "Channel Temperature (K)", - 'units': "Kelvin", - 'coordinates': "Longitude Latitude Freq", - 'scale_factor': 0.01, - '_FillValue': -999, - 'valid_range': [0, 50000]}, - dims=('Scanline', 'Field_of_view', 'Channel')) + attrs={"long_name": "Channel Temperature (K)", + "units": "Kelvin", + "coordinates": "Longitude Latitude Freq", + "scale_factor": 0.01, + "_FillValue": -999, + "valid_range": [0, 50000]}, + dims=("Scanline", "Field_of_view", "Channel")) rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV)), - attrs={'long_name': "Rain Rate (mm/hr)", - 'units': "mm/hr", - 'coordinates': "Longitude Latitude", - 'scale_factor': 0.1, - '_FillValue': -999, - 'valid_range': [0, 1000]}, - dims=('Scanline', 'Field_of_view')) + attrs={"long_name": "Rain Rate (mm/hr)", + "units": "mm/hr", + "coordinates": "Longitude Latitude", + "scale_factor": 0.1, + "_FillValue": -999, + "valid_range": [0, 1000]}, + dims=("Scanline", "Field_of_view")) sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV)), - attrs={'description': "type of surface:0-ocean," + + attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow", - 'units': "1", - 'coordinates': "Longitude Latitude", - '_FillValue': -999, - 'valid_range': [0, 3] + "units": "1", + "coordinates": "Longitude Latitude", + "_FillValue": -999, + "valid_range": [0, 3] }, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) latitude = xr.DataArray(DEFAULT_LAT.reshape(DEFAULT_2D_SHAPE), - attrs={'long_name': + attrs={"long_name": "Latitude of the view (-90,90)"}, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) longitude = xr.DataArray(DEFAULT_LON.reshape(DEFAULT_2D_SHAPE), - attrs={'long_name': + attrs={"long_name": "Longitude of the view (-180,180)"}, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) ds_vars = { - 'Freq': FREQ, - 'Polo': POLO, - 'BT': bt, - 'RR': rr, - 'Sfc_type': sfc_type, - 'Latitude': latitude, - 'Longitude': longitude + "Freq": FREQ, + "Polo": POLO, + "BT": bt, + "RR": rr, + "Sfc_type": sfc_type, + "Latitude": latitude, + "Longitude": longitude } - attrs = {'missing_value': -999.} + attrs = {"missing_value": -999.} ds = xr.Dataset(ds_vars, attrs=attrs) ds = ds.assign_coords({"Freq": FREQ, "Latitude": latitude, "Longitude": longitude}) return ds @@ -175,38 +175,38 @@ def _get_datasets_with_less_attributes(): """Represent files with two resolution of variables in them (ex. OCEAN).""" bt = xr.DataArray(np.linspace(1830, 3930, N_SCANLINE * N_FOV * N_CHANNEL). 
reshape(N_SCANLINE, N_FOV, N_CHANNEL), - attrs={'long_name': "Channel Temperature (K)", - 'scale_factor': 0.01}, - dims=('Scanline', 'Field_of_view', 'Channel')) + attrs={"long_name": "Channel Temperature (K)", + "scale_factor": 0.01}, + dims=("Scanline", "Field_of_view", "Channel")) rr = xr.DataArray(np.random.randint(100, 500, size=(N_SCANLINE, N_FOV)), - attrs={'long_name': "Rain Rate (mm/hr)", - 'scale_factor': 0.1}, - dims=('Scanline', 'Field_of_view')) + attrs={"long_name": "Rain Rate (mm/hr)", + "scale_factor": 0.1}, + dims=("Scanline", "Field_of_view")) sfc_type = xr.DataArray(np.random.randint(0, 4, size=(N_SCANLINE, N_FOV)), - attrs={'description': "type of surface:0-ocean," + + attrs={"description": "type of surface:0-ocean," + "1-sea ice,2-land,3-snow"}, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) latitude = xr.DataArray(DEFAULT_LAT.reshape(DEFAULT_2D_SHAPE), - attrs={'long_name': + attrs={"long_name": "Latitude of the view (-90,90)"}, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) longitude = xr.DataArray(DEFAULT_LON.reshape(DEFAULT_2D_SHAPE), attrs={"long_name": "Longitude of the view (-180,180)"}, - dims=('Scanline', 'Field_of_view')) + dims=("Scanline", "Field_of_view")) ds_vars = { - 'Freq': FREQ, - 'Polo': POLO, - 'BT': bt, - 'RR': rr, - 'Sfc_type': sfc_type, - 'Longitude': longitude, - 'Latitude': latitude + "Freq": FREQ, + "Polo": POLO, + "BT": bt, + "RR": rr, + "Sfc_type": sfc_type, + "Longitude": longitude, + "Latitude": latitude } - attrs = {'missing_value': -999.} + attrs = {"missing_value": -999.} ds = xr.Dataset(ds_vars, attrs=attrs) ds = ds.assign_coords({"Freq": FREQ, "Latitude": latitude, "Longitude": longitude}) return ds @@ -227,7 +227,7 @@ class TestMirsL2_NcReader: def setup_method(self): """Read fake data.""" from satpy._config import config_search_paths - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) @pytest.mark.parametrize( ("filenames", "expected_loadables"), @@ -240,7 +240,7 @@ def setup_method(self): def test_reader_creation(self, filenames, expected_loadables): """Test basic initialization.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.mirs.xr.open_dataset') as od: + with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -260,7 +260,7 @@ def test_reader_creation(self, filenames, expected_loadables): def test_available_datasets(self, filenames, expected_datasets): """Test that variables are dynamically discovered.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.mirs.xr.open_dataset') as od: + with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) @@ -272,12 +272,12 @@ def test_available_datasets(self, filenames, expected_datasets): @staticmethod def _check_area(data_arr): from pyresample.geometry import SwathDefinition - area = data_arr.attrs['area'] + area = data_arr.attrs["area"] assert isinstance(area, SwathDefinition) @staticmethod def _check_fill(data_arr): - assert '_FillValue' not in data_arr.attrs + assert "_FillValue" not in data_arr.attrs if np.issubdtype(data_arr.dtype, np.floating): # we started with float32, it should stay that way assert 
data_arr.dtype.type == np.float64 @@ -285,23 +285,23 @@ def _check_fill(data_arr): @staticmethod def _check_valid_range(data_arr, test_valid_range): # valid_range is popped out of data_arr.attrs when it is applied - assert 'valid_range' not in data_arr.attrs + assert "valid_range" not in data_arr.attrs assert data_arr.data.min() >= test_valid_range[0] assert data_arr.data.max() <= test_valid_range[1] @staticmethod def _check_fill_value(data_arr, test_fill_value): - assert '_FillValue' not in data_arr.attrs + assert "_FillValue" not in data_arr.attrs assert not (data_arr.data == test_fill_value).any() @staticmethod def _check_attrs(data_arr, platform_name): attrs = data_arr.attrs - assert 'scale_factor' not in attrs - assert 'platform_name' in attrs - assert attrs['platform_name'] == platform_name - assert attrs['start_time'] == START_TIME - assert attrs['end_time'] == END_TIME + assert "scale_factor" not in attrs + assert "platform_name" in attrs + assert attrs["platform_name"] == platform_name + assert attrs["start_time"] == START_TIME + assert attrs["end_time"] == END_TIME @pytest.mark.parametrize( ("filenames", "loadable_ids", "platform_name"), @@ -312,18 +312,18 @@ def _check_attrs(data_arr, platform_name): ([OTHER_MIRS_L2_SWATH], TEST_VARS, "gpm"), ] ) - @pytest.mark.parametrize('reader_kw', [{}, {'limb_correction': False}]) + @pytest.mark.parametrize("reader_kw", [{}, {"limb_correction": False}]) def test_basic_load(self, filenames, loadable_ids, platform_name, reader_kw): """Test that variables are loaded properly.""" from satpy.readers import load_reader - with mock.patch('satpy.readers.mirs.xr.open_dataset') as od: + with mock.patch("satpy.readers.mirs.xr.open_dataset") as od: od.side_effect = fake_open_dataset r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames(filenames) r.create_filehandlers(loadables, fh_kwargs=reader_kw) - with mock.patch('satpy.readers.mirs.read_atms_coeff_to_string') as \ - fd, mock.patch('satpy.readers.mirs.retrieve'): + with mock.patch("satpy.readers.mirs.read_atms_coeff_to_string") as \ + fd, mock.patch("satpy.readers.mirs.retrieve"): fd.side_effect = fake_coeff_from_fn loaded_data_arrs = r.load(loadable_ids) assert len(loaded_data_arrs) == len(loadable_ids) @@ -332,12 +332,12 @@ def test_basic_load(self, filenames, loadable_ids, for _data_id, data_arr in loaded_data_arrs.items(): data_arr = data_arr.compute() var_name = data_arr.attrs["name"] - if var_name not in ['latitude', 'longitude']: + if var_name not in ["latitude", "longitude"]: self._check_area(data_arr) self._check_fill(data_arr) self._check_attrs(data_arr, platform_name) - input_fake_data = test_data['BT'] if "btemp" in var_name \ + input_fake_data = test_data["BT"] if "btemp" in var_name \ else test_data[var_name] if "valid_range" in input_fake_data.attrs: valid_range = input_fake_data.attrs["valid_range"] @@ -346,9 +346,9 @@ def test_basic_load(self, filenames, loadable_ids, fill_value = input_fake_data.attrs["_FillValue"] self._check_fill_value(data_arr, fill_value) - sensor = data_arr.attrs['sensor'] - if reader_kw.get('limb_correction', True) and sensor == 'atms': + sensor = data_arr.attrs["sensor"] + if reader_kw.get("limb_correction", True) and sensor == "atms": fd.assert_called() else: fd.assert_not_called() - assert data_arr.attrs['units'] == DEFAULT_UNITS[var_name] + assert data_arr.attrs["units"] == DEFAULT_UNITS[var_name] diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 57ae3dfc31..bcee32ddbb 100644 
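The MIRS tests above never read files from disk: they patch `xr.open_dataset` inside the reader module and point its `side_effect` at a builder that returns an in-memory `xr.Dataset`. A minimal illustration of that pattern, with placeholder names and a single `RR` variable standing in for the full fake product:

from unittest import mock

import numpy as np
import xarray as xr


def fake_open_dataset(filename, **kwargs):
    """Return an in-memory dataset instead of reading `filename`."""
    rr = xr.DataArray(np.zeros((3, 4), dtype=np.float32),
                      dims=("Scanline", "Field_of_view"),
                      attrs={"units": "mm/hr"})
    return xr.Dataset({"RR": rr}, attrs={"missing_value": -999.})


def load_rain_rate(filename):
    """Stand-in for reader code that would normally hit the filesystem."""
    return xr.open_dataset(filename)["RR"]


def test_load_rain_rate_without_files():
    with mock.patch("xarray.open_dataset") as od:
        od.side_effect = fake_open_dataset
        rr = load_rain_rate("some_granule.nc")
    assert rr.attrs["units"] == "mm/hr"
    od.assert_called_once()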
--- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -955,7 +955,7 @@ def test_xml_calibration_to_radiance(self): def test_xml_navigation(self): """Test the navigation.""" from pyproj import CRS - crs = CRS('EPSG:32616') + crs = CRS("EPSG:32616") dsid = make_dataid(name="B01", resolution=60) result = self.xml_tile_fh.get_area_def(dsid) @@ -976,7 +976,7 @@ def setup_method(self): self.tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_tile_xml), self.filename_info, mock.MagicMock()) - @pytest.mark.parametrize("mask_saturated,calibration,expected", + @pytest.mark.parametrize(("mask_saturated", "calibration", "expected"), [(True, "reflectance", [[np.nan, 0.01 - 10], [645.34, np.inf]]), (False, "reflectance", [[np.nan, 0.01 - 10], [645.34, 645.35]]), (True, "radiance", [[np.nan, -251.58426503], [16251.99095011, np.inf]])]) diff --git a/satpy/tests/reader_tests/test_msu_gsa_l1b.py b/satpy/tests/reader_tests/test_msu_gsa_l1b.py index a5efc52be6..f55c9638c8 100644 --- a/satpy/tests/reader_tests/test_msu_gsa_l1b.py +++ b/satpy/tests/reader_tests/test_msu_gsa_l1b.py @@ -27,7 +27,7 @@ from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler from satpy.tests.utils import make_dataid -SOLCONST = '273.59' +SOLCONST = "273.59" class FakeHDF5FileHandler2(FakeHDF5FileHandler): @@ -35,70 +35,70 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler): def _get_data(self, num_scans, num_cols): data = { - 'Data/resolution_1km/Solar_Zenith_Angle': + "Data/resolution_1km/Solar_Zenith_Angle": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Geolocation/resolution_1km/Latitude': + dims=("x", "y")), + "Geolocation/resolution_1km/Latitude": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Geolocation/resolution_1km/Longitude': + dims=("x", "y")), + "Geolocation/resolution_1km/Longitude": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Data/resolution_1km/Radiance_01': + dims=("x", "y")), + "Data/resolution_1km/Radiance_01": xr.DataArray( da.ones((num_scans*4, num_cols*4), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999., 'F_solar_constant': SOLCONST + "scale": 0.01, "offset": 0., "fill_value": -999., "F_solar_constant": SOLCONST }, - dims=('x', 'y')), - 'Data/resolution_4km/Solar_Zenith_Angle': + dims=("x", "y")), + "Data/resolution_4km/Solar_Zenith_Angle": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Geolocation/resolution_4km/Latitude': + dims=("x", "y")), + "Geolocation/resolution_4km/Latitude": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. 
}, - dims=('x', 'y')), - 'Geolocation/resolution_4km/Longitude': + dims=("x", "y")), + "Geolocation/resolution_4km/Longitude": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), - 'Data/resolution_4km/Brightness_Temperature_09': + dims=("x", "y")), + "Data/resolution_4km/Brightness_Temperature_09": xr.DataArray( da.ones((num_scans, num_cols), chunks=1024, dtype=np.uint16), attrs={ - 'scale': 0.01, 'offset': 0., 'fill_value': -999. + "scale": 0.01, "offset": 0., "fill_value": -999. }, - dims=('x', 'y')), + dims=("x", "y")), } return data @@ -107,10 +107,10 @@ def get_test_content(self, filename, filename_info, filetype_info): num_scans = 20 num_cols = 2048 global_attrs = { - '/attr/timestamp_without_timezone': '2022-01-13T12:45:00', - '/attr/satellite_observation_point_height': '38500.0', - '/attr/satellite_observation_point_latitude': '71.25', - '/attr/satellite_observation_point_longitude': '21.44', + "/attr/timestamp_without_timezone": "2022-01-13T12:45:00", + "/attr/satellite_observation_point_height": "38500.0", + "/attr/satellite_observation_point_latitude": "71.25", + "/attr/satellite_observation_point_longitude": "21.44", } data = self._get_data(num_scans, num_cols) @@ -131,13 +131,13 @@ def setup_method(self): from satpy._config import config_search_paths from satpy.readers import load_reader from satpy.readers.msu_gsa_l1b import MSUGSAFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(MSUGSAFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(MSUGSAFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True - filenames = ['ArcticaM1_202201131245.h5'] + filenames = ["ArcticaM1_202201131245.h5"] self.reader = load_reader(self.reader_configs) files = self.reader.select_files_from_pathnames(filenames) self.reader.create_filehandlers(files) @@ -148,34 +148,34 @@ def teardown_method(self): def test_irbt(self): """Test retrieval in brightness temperature.""" - ds_ids = [make_dataid(name='C09', calibration='brightness_temperature')] + ds_ids = [make_dataid(name="C09", calibration="brightness_temperature")] res = self.reader.load(ds_ids) - assert 'C09' in res - assert res['C09'].attrs['calibration'] == 'brightness_temperature' - assert res['C09'].attrs['platform_name'] == 'Arctica-M-N1' - assert res['C09'].attrs['sat_latitude'] == 71.25 - assert res['C09'].attrs['sat_longitude'] == 21.44 - assert res['C09'].attrs['sat_altitude'] == 38500. - assert res['C09'].attrs['resolution'] == 4000 + assert "C09" in res + assert res["C09"].attrs["calibration"] == "brightness_temperature" + assert res["C09"].attrs["platform_name"] == "Arctica-M-N1" + assert res["C09"].attrs["sat_latitude"] == 71.25 + assert res["C09"].attrs["sat_longitude"] == 21.44 + assert res["C09"].attrs["sat_altitude"] == 38500. 
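The fake MSU-GS/A arrays above carry `scale`, `offset` and `fill_value` attributes, and the tests expect calibrated output with those helpers consumed. As a hedged, generic sketch (not the actual `MSUGSAFileHandler` code), applying them typically means masking the fill value, scaling and offsetting, and dropping the helper attributes:

import numpy as np
import xarray as xr


def apply_scale_offset(arr):
    """Mask the fill value, apply scale/offset, drop the helper attrs."""
    attrs = dict(arr.attrs)
    scale = attrs.pop("scale", 1.0)
    offset = attrs.pop("offset", 0.0)
    fill = attrs.pop("fill_value", None)
    data = arr.where(arr != fill) if fill is not None else arr
    out = data * scale + offset
    out.attrs = attrs
    return out


counts = xr.DataArray(
    np.array([[100.0, -999.0], [200.0, 300.0]]),
    dims=("y", "x"),
    attrs={"scale": 0.01, "offset": 0.0, "fill_value": -999.0},
)
calibrated = apply_scale_offset(counts)
assert "scale" not in calibrated.attrs
np.testing.assert_allclose(calibrated.values, [[1.0, np.nan], [2.0, 3.0]])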
+ assert res["C09"].attrs["resolution"] == 4000 def test_nocounts(self): """Test we can't get IR or VIS data as counts.""" - ds_ids = [make_dataid(name='C01', calibration='counts')] + ds_ids = [make_dataid(name="C01", calibration="counts")] with pytest.raises(KeyError): self.reader.load(ds_ids) - ds_ids = [make_dataid(name='C09', calibration='counts')] + ds_ids = [make_dataid(name="C09", calibration="counts")] with pytest.raises(KeyError): self.reader.load(ds_ids) def test_vis_cal(self): """Test that we can retrieve VIS data as both radiance and reflectance.""" - ds_ids = [make_dataid(name='C01', calibration='radiance')] + ds_ids = [make_dataid(name="C01", calibration="radiance")] res = self.reader.load(ds_ids) - rad = res['C01'].data - ds_ids = [make_dataid(name='C01', calibration='reflectance')] + rad = res["C01"].data + ds_ids = [make_dataid(name="C01", calibration="reflectance")] res = self.reader.load(ds_ids) - refl = res['C01'].data + refl = res["C01"].data # Check the RAD->REFL conversion np.testing.assert_allclose(100 * np.pi * rad / float(SOLCONST), refl) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 749386bb98..b03336c230 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -44,27 +44,27 @@ # - request attrs_exp: dict = { - 'platform': 'MET7', - 'raw_metadata': {'foo': 'bar'}, - 'sensor': 'MVIRI', - 'orbital_parameters': { - 'projection_longitude': 57.0, - 'projection_latitude': 0.0, - 'projection_altitude': 35785860.0, - 'satellite_actual_longitude': 57.1, - 'satellite_actual_latitude': 0.1, + "platform": "MET7", + "raw_metadata": {"foo": "bar"}, + "sensor": "MVIRI", + "orbital_parameters": { + "projection_longitude": 57.0, + "projection_latitude": 0.0, + "projection_altitude": 35785860.0, + "satellite_actual_longitude": 57.1, + "satellite_actual_latitude": 0.1, } } attrs_refl_exp = attrs_exp.copy() attrs_refl_exp.update( - {'sun_earth_distance_correction_applied': True, - 'sun_earth_distance_correction_factor': 1.} + {"sun_earth_distance_correction_applied": True, + "sun_earth_distance_correction_factor": 1.} ) -acq_time_vis_exp = [np.datetime64('1970-01-01 00:30'), - np.datetime64('1970-01-01 00:30'), - np.datetime64('1970-01-01 02:30'), - np.datetime64('1970-01-01 02:30')] +acq_time_vis_exp = [np.datetime64("1970-01-01 00:30"), + np.datetime64("1970-01-01 00:30"), + np.datetime64("1970-01-01 02:30"), + np.datetime64("1970-01-01 02:30")] vis_counts_exp = xr.DataArray( np.array( [[0., 17., 34., 51.], @@ -73,9 +73,9 @@ [204., 221., 238., 255]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_vis_exp), + "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_exp ) @@ -87,9 +87,9 @@ [235.48, 255.2, 274.92, 294.64]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_vis_exp), + "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_exp ) @@ -104,9 +104,9 @@ # (0, 0) and (2, 2) are NaN because radiance is NaN # (0, 2) is NaN because SZA >= 90 degrees # Last row/col is NaN due to SZA interpolation - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_vis_exp), + "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_refl_exp ) @@ -118,23 +118,23 @@ [1.3, 1.4, 1.5, 1.6]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_vis_exp), + "acq_time": ("y", acq_time_vis_exp), }, 
attrs=attrs_exp ) -acq_time_ir_wv_exp = [np.datetime64('1970-01-01 00:30'), - np.datetime64('1970-01-01 02:30')] +acq_time_ir_wv_exp = [np.datetime64("1970-01-01 00:30"), + np.datetime64("1970-01-01 02:30")] wv_counts_exp = xr.DataArray( np.array( [[0, 85], [170, 255]], dtype=np.uint8 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -144,9 +144,9 @@ [8, 12.25]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -156,9 +156,9 @@ [252.507448, 266.863289]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -168,9 +168,9 @@ [170, 255]], dtype=np.uint8 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -180,9 +180,9 @@ [165, 250]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -192,9 +192,9 @@ [204.32955838, 223.28709913]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_ir_wv_exp), + "acq_time": ("y", acq_time_ir_wv_exp), }, attrs=attrs_exp ) @@ -206,9 +206,9 @@ [0, 0, 0, 0]], dtype=np.uint8 ), - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'acq_time': ('y', acq_time_vis_exp), + "acq_time": ("y", acq_time_vis_exp), }, attrs=attrs_exp ) @@ -220,7 +220,7 @@ [np.nan, np.nan, np.nan, np.nan]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), attrs=attrs_exp ) sza_ir_wv_exp = xr.DataArray( @@ -229,33 +229,33 @@ [0, 45]], dtype=np.float32 ), - dims=('y', 'x'), + dims=("y", "x"), attrs=attrs_exp ) area_vis_exp = AreaDefinition( - area_id='geos_mviri_4x4', - proj_id='geos_mviri_4x4', - description='MVIRI Geostationary Projection', + area_id="geos_mviri_4x4", + proj_id="geos_mviri_4x4", + description="MVIRI Geostationary Projection", projection={ - 'proj': 'geos', - 'lon_0': 57.0, - 'h': ALTITUDE, - 'a': EQUATOR_RADIUS, - 'b': POLE_RADIUS + "proj": "geos", + "lon_0": 57.0, + "h": ALTITUDE, + "a": EQUATOR_RADIUS, + "b": POLE_RADIUS }, width=4, height=4, area_extent=[5621229.74392, 5621229.74392, -5621229.74392, -5621229.74392] ) area_ir_wv_exp = area_vis_exp.copy( - area_id='geos_mviri_2x2', - proj_id='geos_mviri_2x2', + area_id="geos_mviri_2x2", + proj_id="geos_mviri_2x2", width=2, height=2 ) -@pytest.fixture(name='fake_dataset') +@pytest.fixture(name="fake_dataset") def fixture_fake_dataset(): """Create fake dataset.""" count_ir = da.linspace(0, 255, 4, dtype=np.uint8).reshape(2, 2) @@ -277,55 +277,55 @@ def fixture_fake_dataset(): dtype=np.uint8 ) ) - time = np.arange(4).astype('datetime64[h]').reshape(2, 2) + time = np.arange(4).astype("datetime64[h]").reshape(2, 2) ds = xr.Dataset( data_vars={ - 'count_vis': (('y', 'x'), count_vis), - 'count_wv': (('y_ir_wv', 'x_ir_wv'), count_wv), - 'count_ir': (('y_ir_wv', 'x_ir_wv'), count_ir), - 'toa_bidirectional_reflectance_vis': vis_refl_exp / 100, - 'u_independent_toa_bidirectional_reflectance': u_vis_refl_exp / 100, - 'quality_pixel_bitmask': (('y', 'x'), mask), - 'solar_zenith_angle': (('y_tie', 'x_tie'), sza), - 'time_ir_wv': (('y_ir_wv', 'x_ir_wv'), time), - 'a_ir': -5.0, - 'b_ir': 1.0, - 'bt_a_ir': 10.0, - 'bt_b_ir': 
-1000.0, - 'a_wv': -0.5, - 'b_wv': 0.05, - 'bt_a_wv': 10.0, - 'bt_b_wv': -2000.0, - 'years_since_launch': 20.0, - 'a0_vis': 1.0, - 'a1_vis': 0.01, - 'a2_vis': -0.0001, - 'mean_count_space_vis': 1.0, - 'distance_sun_earth': 1.0, - 'solar_irradiance_vis': 650.0, - 'sub_satellite_longitude_start': 57.1, - 'sub_satellite_longitude_end': np.nan, - 'sub_satellite_latitude_start': np.nan, - 'sub_satellite_latitude_end': 0.1, + "count_vis": (("y", "x"), count_vis), + "count_wv": (("y_ir_wv", "x_ir_wv"), count_wv), + "count_ir": (("y_ir_wv", "x_ir_wv"), count_ir), + "toa_bidirectional_reflectance_vis": vis_refl_exp / 100, + "u_independent_toa_bidirectional_reflectance": u_vis_refl_exp / 100, + "quality_pixel_bitmask": (("y", "x"), mask), + "solar_zenith_angle": (("y_tie", "x_tie"), sza), + "time_ir_wv": (("y_ir_wv", "x_ir_wv"), time), + "a_ir": -5.0, + "b_ir": 1.0, + "bt_a_ir": 10.0, + "bt_b_ir": -1000.0, + "a_wv": -0.5, + "b_wv": 0.05, + "bt_a_wv": 10.0, + "bt_b_wv": -2000.0, + "years_since_launch": 20.0, + "a0_vis": 1.0, + "a1_vis": 0.01, + "a2_vis": -0.0001, + "mean_count_space_vis": 1.0, + "distance_sun_earth": 1.0, + "solar_irradiance_vis": 650.0, + "sub_satellite_longitude_start": 57.1, + "sub_satellite_longitude_end": np.nan, + "sub_satellite_latitude_start": np.nan, + "sub_satellite_latitude_end": 0.1, }, coords={ - 'y': [1, 2, 3, 4], - 'x': [1, 2, 3, 4], - 'y_ir_wv': [1, 2], - 'x_ir_wv': [1, 2], - 'y_tie': [1, 2], - 'x_tie': [1, 2] + "y": [1, 2, 3, 4], + "x": [1, 2, 3, 4], + "y_ir_wv": [1, 2], + "x_ir_wv": [1, 2], + "y_tie": [1, 2], + "x_tie": [1, 2] }, - attrs={'foo': 'bar'} + attrs={"foo": "bar"} ) - ds['count_ir'].attrs['ancillary_variables'] = 'a_ir b_ir' - ds['count_wv'].attrs['ancillary_variables'] = 'a_wv b_wv' + ds["count_ir"].attrs["ancillary_variables"] = "a_ir b_ir" + ds["count_wv"].attrs["ancillary_variables"] = "a_wv b_wv" return ds @pytest.fixture( - name='file_handler', + name="file_handler", params=[FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler] ) @@ -334,21 +334,21 @@ def fixture_file_handler(fake_dataset, request): marker = request.node.get_closest_marker("file_handler_data") mask_bad_quality = True if marker: - mask_bad_quality = marker.kwargs['mask_bad_quality'] + mask_bad_quality = marker.kwargs["mask_bad_quality"] fh_class = request.param - with mock.patch('satpy.readers.mviri_l1b_fiduceo_nc.xr.open_dataset') as open_dataset: + with mock.patch("satpy.readers.mviri_l1b_fiduceo_nc.xr.open_dataset") as open_dataset: open_dataset.return_value = fake_dataset return fh_class( - filename='filename', - filename_info={'platform': 'MET7', - 'sensor': 'MVIRI', - 'projection_longitude': '57.0'}, - filetype_info={'foo': 'bar'}, + filename="filename", + filename_info={"platform": "MET7", + "sensor": "MVIRI", + "projection_longitude": "57.0"}, + filetype_info={"foo": "bar"}, mask_bad_quality=mask_bad_quality ) -@pytest.fixture(name='reader') +@pytest.fixture(name="reader") def fixture_reader(): """Return MVIRI FIDUCEO FCDR reader.""" from satpy._config import config_search_paths @@ -369,38 +369,38 @@ def test_init(self, file_handler): assert file_handler.mask_bad_quality is True @pytest.mark.parametrize( - ('name', 'calibration', 'resolution', 'expected'), + ("name", "calibration", "resolution", "expected"), [ - ('VIS', 'counts', 2250, vis_counts_exp), - ('VIS', 'radiance', 2250, vis_rad_exp), - ('VIS', 'reflectance', 2250, vis_refl_exp), - ('WV', 'counts', 4500, wv_counts_exp), - ('WV', 'radiance', 4500, wv_rad_exp), - ('WV', 'brightness_temperature', 4500, 
wv_bt_exp), - ('IR', 'counts', 4500, ir_counts_exp), - ('IR', 'radiance', 4500, ir_rad_exp), - ('IR', 'brightness_temperature', 4500, ir_bt_exp), - ('quality_pixel_bitmask', None, 2250, quality_pixel_bitmask_exp), - ('solar_zenith_angle', None, 2250, sza_vis_exp), - ('solar_zenith_angle', None, 4500, sza_ir_wv_exp), - ('u_independent_toa_bidirectional_reflectance', None, 4500, u_vis_refl_exp) + ("VIS", "counts", 2250, vis_counts_exp), + ("VIS", "radiance", 2250, vis_rad_exp), + ("VIS", "reflectance", 2250, vis_refl_exp), + ("WV", "counts", 4500, wv_counts_exp), + ("WV", "radiance", 4500, wv_rad_exp), + ("WV", "brightness_temperature", 4500, wv_bt_exp), + ("IR", "counts", 4500, ir_counts_exp), + ("IR", "radiance", 4500, ir_rad_exp), + ("IR", "brightness_temperature", 4500, ir_bt_exp), + ("quality_pixel_bitmask", None, 2250, quality_pixel_bitmask_exp), + ("solar_zenith_angle", None, 2250, sza_vis_exp), + ("solar_zenith_angle", None, 4500, sza_ir_wv_exp), + ("u_independent_toa_bidirectional_reflectance", None, 4500, u_vis_refl_exp) ] ) def test_get_dataset(self, file_handler, name, calibration, resolution, expected): """Test getting datasets.""" - id_keys = {'name': name, 'resolution': resolution} + id_keys = {"name": name, "resolution": resolution} if calibration: - id_keys['calibration'] = calibration + id_keys["calibration"] = calibration dataset_id = make_dataid(**id_keys) - dataset_info = {'platform': 'MET7'} + dataset_info = {"platform": "MET7"} is_easy = isinstance(file_handler, FiduceoMviriEasyFcdrFileHandler) - is_vis = name == 'VIS' - is_refl = calibration == 'reflectance' + is_vis = name == "VIS" + is_refl = calibration == "reflectance" if is_easy and is_vis and not is_refl: # VIS counts/radiance not available in easy FCDR - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Cannot calibrate to .*. 
Easy FCDR provides reflectance only."): file_handler.get_dataset(dataset_id, dataset_info) else: ds = file_handler.get_dataset(dataset_id, dataset_info) @@ -412,34 +412,34 @@ def test_get_dataset_corrupt(self, file_handler): """Test getting datasets with known corruptions.""" # Time may have different names and satellite position might be missing file_handler.nc.nc = file_handler.nc.nc.rename( - {'time_ir_wv': 'time'} + {"time_ir_wv": "time"} ) file_handler.nc.nc = file_handler.nc.nc.drop_vars( - ['sub_satellite_longitude_start'] + ["sub_satellite_longitude_start"] ) dataset_id = make_dataid( - name='VIS', - calibration='reflectance', + name="VIS", + calibration="reflectance", resolution=2250 ) - ds = file_handler.get_dataset(dataset_id, {'platform': 'MET7'}) - assert 'actual_satellite_longitude' not in ds.attrs['orbital_parameters'] - assert 'actual_satellite_latitude' not in ds.attrs['orbital_parameters'] + ds = file_handler.get_dataset(dataset_id, {"platform": "MET7"}) + assert "actual_satellite_longitude" not in ds.attrs["orbital_parameters"] + assert "actual_satellite_latitude" not in ds.attrs["orbital_parameters"] xr.testing.assert_allclose(ds, vis_refl_exp) @mock.patch( - 'satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_acq_time' + "satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_acq_time" ) def test_time_cache(self, interp_acq_time, file_handler): """Test caching of acquisition times.""" dataset_id = make_dataid( - name='VIS', + name="VIS", resolution=2250, - calibration='reflectance' + calibration="reflectance" ) info = {} - interp_acq_time.return_value = xr.DataArray([1, 2, 3, 4], dims='y') + interp_acq_time.return_value = xr.DataArray([1, 2, 3, 4], dims="y") # Cache init file_handler.get_dataset(dataset_id, info) @@ -451,22 +451,22 @@ def test_time_cache(self, interp_acq_time, file_handler): interp_acq_time.assert_not_called() # Cache miss - interp_acq_time.return_value = xr.DataArray([1, 2], dims='y') + interp_acq_time.return_value = xr.DataArray([1, 2], dims="y") another_id = make_dataid( - name='IR', + name="IR", resolution=4500, - calibration='brightness_temperature' + calibration="brightness_temperature" ) interp_acq_time.reset_mock() file_handler.get_dataset(another_id, info) interp_acq_time.assert_called() @mock.patch( - 'satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_tiepoints' + "satpy.readers.mviri_l1b_fiduceo_nc.Interpolator.interp_tiepoints" ) def test_angle_cache(self, interp_tiepoints, file_handler): """Test caching of angle datasets.""" - dataset_id = make_dataid(name='solar_zenith_angle', + dataset_id = make_dataid(name="solar_zenith_angle", resolution=2250) info = {} @@ -480,21 +480,21 @@ def test_angle_cache(self, interp_tiepoints, file_handler): interp_tiepoints.assert_not_called() # Cache miss - another_id = make_dataid(name='solar_zenith_angle', + another_id = make_dataid(name="solar_zenith_angle", resolution=4500) interp_tiepoints.reset_mock() file_handler.get_dataset(another_id, info) interp_tiepoints.assert_called() @pytest.mark.parametrize( - ('name', 'resolution', 'area_exp'), + ("name", "resolution", "area_exp"), [ - ('VIS', 2250, area_vis_exp), - ('WV', 4500, area_ir_wv_exp), - ('IR', 4500, area_ir_wv_exp), - ('quality_pixel_bitmask', 2250, area_vis_exp), - ('solar_zenith_angle', 2250, area_vis_exp), - ('solar_zenith_angle', 4500, area_ir_wv_exp) + ("VIS", 2250, area_vis_exp), + ("WV", 4500, area_ir_wv_exp), + ("IR", 4500, area_ir_wv_exp), + ("quality_pixel_bitmask", 2250, area_vis_exp), + ("solar_zenith_angle", 2250, 
area_vis_exp), + ("solar_zenith_angle", 4500, area_ir_wv_exp) ] ) def test_get_area_definition(self, file_handler, name, resolution, @@ -508,7 +508,7 @@ def test_get_area_definition(self, file_handler, name, resolution, assert b == b_exp assert area.width == area_exp.width assert area.height == area_exp.height - for key in ['h', 'lon_0', 'proj', 'units']: + for key in ["h", "lon_0", "proj", "units"]: assert area.proj_dict[key] == area_exp.proj_dict[key] np.testing.assert_allclose(area.area_extent, area_exp.area_extent) @@ -516,38 +516,38 @@ def test_calib_exceptions(self, file_handler): """Test calibration exceptions.""" with pytest.raises(KeyError): file_handler.get_dataset( - make_dataid(name='solar_zenith_angle', calibration='counts'), + make_dataid(name="solar_zenith_angle", calibration="counts"), {} ) with pytest.raises(KeyError): file_handler.get_dataset( make_dataid( - name='VIS', + name="VIS", resolution=2250, - calibration='brightness_temperature'), + calibration="brightness_temperature"), {} ) with pytest.raises(KeyError): file_handler.get_dataset( make_dataid( - name='IR', + name="IR", resolution=4500, - calibration='reflectance'), + calibration="reflectance"), {} ) if isinstance(file_handler, FiduceoMviriEasyFcdrFileHandler): with pytest.raises(KeyError): file_handler.get_dataset( - {'name': 'VIS', 'calibration': 'counts'}, + {"name": "VIS", "calibration": "counts"}, {} ) # not available in easy FCDR @pytest.mark.file_handler_data(mask_bad_quality=False) def test_bad_quality_warning(self, file_handler): """Test warning about bad VIS quality.""" - file_handler.nc.nc['quality_pixel_bitmask'] = 2 - vis = make_dataid(name='VIS', resolution=2250, - calibration='reflectance') + file_handler.nc.nc["quality_pixel_bitmask"] = 2 + vis = make_dataid(name="VIS", resolution=2250, + calibration="reflectance") with pytest.warns(UserWarning): file_handler.get_dataset(vis, {}) @@ -579,25 +579,25 @@ def test_reassign_coords(self): """ nc = mock.MagicMock( coords={ - 'y': [.1, .2], - 'x': [.3, .4] + "y": [.1, .2], + "x": [.3, .4] }, - dims=('y', 'x') + dims=("y", "x") ) nc.__getitem__.return_value = xr.DataArray( [[1, 2], [3, 4]], - dims=('y', 'x') + dims=("y", "x") ) foo_exp = xr.DataArray( [[1, 2], [3, 4]], - dims=('y', 'x'), + dims=("y", "x"), coords={ - 'y': [.1, .2], - 'x': [.3, .4] + "y": [.1, .2], + "x": [.3, .4] } ) ds = DatasetWrapper(nc) - foo = ds['foo'] + foo = ds["foo"] xr.testing.assert_equal(foo, foo_exp) diff --git a/satpy/tests/reader_tests/test_mws_l1b_nc.py b/satpy/tests/reader_tests/test_mws_l1b_nc.py index a304b18782..2d227822a4 100644 --- a/satpy/tests/reader_tests/test_mws_l1b_nc.py +++ b/satpy/tests/reader_tests/test_mws_l1b_nc.py @@ -43,40 +43,40 @@ N_PRTS = 6 -@pytest.fixture +@pytest.fixture() def reader(fake_file): """Return reader of mws level-1b data.""" return MWSL1BFile( filename=fake_file, filename_info={ - 'start_time': ( - datetime.fromisoformat('2000-01-01T01:00:00') + "start_time": ( + datetime.fromisoformat("2000-01-01T01:00:00") ), - 'end_time': ( - datetime.fromisoformat('2000-01-01T02:00:00') + "end_time": ( + datetime.fromisoformat("2000-01-01T02:00:00") ), - 'creation_time': ( - datetime.fromisoformat('2000-01-01T03:00:00') + "creation_time": ( + datetime.fromisoformat("2000-01-01T03:00:00") ), }, filetype_info={ - 'longitude': 'data/navigation_data/mws_lon', - 'latitude': 'data/navigation_data/mws_lat', - 'solar_azimuth': 'data/navigation/mws_solar_azimuth_angle', - 'solar_zenith': 'data/navigation/mws_solar_zenith_angle', - 'satellite_azimuth': 
'data/navigation/mws_satellite_azimuth_angle', - 'satellite_zenith': 'data/navigation/mws_satellite_zenith_angle', + "longitude": "data/navigation_data/mws_lon", + "latitude": "data/navigation_data/mws_lat", + "solar_azimuth": "data/navigation/mws_solar_azimuth_angle", + "solar_zenith": "data/navigation/mws_solar_zenith_angle", + "satellite_azimuth": "data/navigation/mws_satellite_azimuth_angle", + "satellite_zenith": "data/navigation/mws_satellite_zenith_angle", } ) -@pytest.fixture +@pytest.fixture() def fake_file(tmp_path): """Return file path to level-1b file.""" - file_path = tmp_path / 'test_file_mws_l1b.nc' + file_path = tmp_path / "test_file_mws_l1b.nc" writer = MWSL1BFakeFileWriter(file_path) writer.write() - yield file_path + return file_path class MWSL1BFakeFileWriter: @@ -88,11 +88,11 @@ def __init__(self, file_path): def write(self): """Write fake data to file.""" - with Dataset(self.file_path, 'w') as dataset: + with Dataset(self.file_path, "w") as dataset: self._write_attributes(dataset) self._write_status_group(dataset) self._write_quality_group(dataset) - data_group = dataset.createGroup('data') + data_group = dataset.createGroup("data") self._create_scan_dimensions(data_group) self._write_navigation_data_group(data_group) self._write_calibration_data_group(data_group) @@ -109,45 +109,45 @@ def _write_attributes(dataset): @staticmethod def _write_status_group(dataset): """Write the status group.""" - group = dataset.createGroup('/status/satellite') + group = dataset.createGroup("/status/satellite") subsat_latitude_start = group.createVariable( - 'subsat_latitude_start', "f4" + "subsat_latitude_start", "f4" ) subsat_latitude_start[:] = 52.19 subsat_longitude_start = group.createVariable( - 'subsat_longitude_start', "f4" + "subsat_longitude_start", "f4" ) subsat_longitude_start[:] = 23.26 subsat_latitude_end = group.createVariable( - 'subsat_latitude_end', "f4" + "subsat_latitude_end", "f4" ) subsat_latitude_end[:] = 60.00 subsat_longitude_end = group.createVariable( - 'subsat_longitude_end', "f4" + "subsat_longitude_end", "f4" ) subsat_longitude_end[:] = 2.47 @staticmethod def _write_quality_group(dataset): """Write the quality group.""" - group = dataset.createGroup('quality') + group = dataset.createGroup("quality") group.overall_quality_flag = 0 duration_of_product = group.createVariable( - 'duration_of_product', "f4" + "duration_of_product", "f4" ) duration_of_product[:] = 5944. @staticmethod def _write_navigation_data_group(dataset): """Write the navigation data group.""" - group = dataset.createGroup('navigation') - dimensions = ('n_scans', 'n_fovs') + group = dataset.createGroup("navigation") + dimensions = ("n_scans", "n_fovs") shape = (N_SCANS, N_FOVS) longitude = group.createVariable( - 'mws_lon', + "mws_lon", np.int32, dimensions=dimensions, ) @@ -157,14 +157,14 @@ def _write_navigation_data_group(dataset): longitude[:] = 35.7535 * np.ones(shape) latitude = group.createVariable( - 'mws_lat', + "mws_lat", np.float32, dimensions=dimensions, ) latitude[:] = 2. 
* np.ones(shape) azimuth = group.createVariable( - 'mws_solar_azimuth_angle', + "mws_solar_azimuth_angle", np.float32, dimensions=dimensions, ) @@ -173,19 +173,19 @@ def _write_navigation_data_group(dataset): @staticmethod def _create_scan_dimensions(dataset): """Create the scan/fovs dimensions.""" - dataset.createDimension('n_channels', N_CHANNELS) - dataset.createDimension('n_channels_os', N_CHANNELS_OS) - dataset.createDimension('n_scans', N_SCANS) - dataset.createDimension('n_fovs', N_FOVS) - dataset.createDimension('n_prts', N_PRTS) - dataset.createDimension('n_fovs_cal', N_FOVS_CAL) + dataset.createDimension("n_channels", N_CHANNELS) + dataset.createDimension("n_channels_os", N_CHANNELS_OS) + dataset.createDimension("n_scans", N_SCANS) + dataset.createDimension("n_fovs", N_FOVS) + dataset.createDimension("n_prts", N_PRTS) + dataset.createDimension("n_fovs_cal", N_FOVS_CAL) @staticmethod def _write_calibration_data_group(dataset): """Write the calibration data group.""" - group = dataset.createGroup('calibration') + group = dataset.createGroup("calibration") toa_bt = group.createVariable( - 'mws_toa_brightness_temperature', np.float32, dimensions=('n_scans', 'n_fovs', 'n_channels',) + "mws_toa_brightness_temperature", np.float32, dimensions=("n_scans", "n_fovs", "n_channels",) ) toa_bt.scale_factor = 1.0 # 1.0E-8 toa_bt.add_offset = 0.0 @@ -195,9 +195,9 @@ def _write_calibration_data_group(dataset): @staticmethod def _write_measurement_data_group(dataset): """Write the measurement data group.""" - group = dataset.createGroup('measurement') + group = dataset.createGroup("measurement") counts = group.createVariable( - 'mws_earth_view_counts', np.int32, dimensions=('n_scans', 'n_fovs', 'n_channels',) + "mws_earth_view_counts", np.int32, dimensions=("n_scans", "n_fovs", "n_channels",) ) counts[:] = 24100 * np.ones((N_SCANS, N_FOVS, N_CHANNELS), dtype=np.int32) @@ -239,9 +239,9 @@ def test_sub_satellite_latitude_end(self, reader): def test_get_dataset_get_channeldata_counts(self, reader): """Test getting channel data.""" - dataset_id = {'name': '1', 'units': None, - 'calibration': 'counts'} - dataset_info = {'file_key': 'data/measurement/mws_earth_view_counts'} + dataset_id = {"name": "1", "units": None, + "calibration": "counts"} + dataset_info = {"file_key": "data/measurement/mws_earth_view_counts"} dataset = reader.get_dataset(dataset_id, dataset_info) expected_bt = np.array([[24100, 24100], @@ -251,9 +251,9 @@ def test_get_dataset_get_channeldata_counts(self, reader): def test_get_dataset_get_channeldata_bts(self, reader): """Test getting channel data.""" - dataset_id = {'name': '1', 'units': 'K', - 'calibration': 'brightness_temperature'} - dataset_info = {'file_key': 'data/calibration/mws_toa_brightness_temperature'} + dataset_id = {"name": "1", "units": "K", + "calibration": "brightness_temperature"} + dataset_info = {"file_key": "data/calibration/mws_toa_brightness_temperature"} dataset = reader.get_dataset(dataset_id, dataset_info) @@ -268,15 +268,15 @@ def test_get_dataset_get_channeldata_bts(self, reader): def test_get_dataset_return_none_if_data_not_exist(self, reader): """Test get dataset return none if data does not exist.""" - dataset_id = {'name': 'unknown'} - dataset_info = {'file_key': 'non/existing/data'} + dataset_id = {"name": "unknown"} + dataset_info = {"file_key": "non/existing/data"} dataset = reader.get_dataset(dataset_id, dataset_info) assert dataset is None def test_get_navigation_longitudes(self, caplog, fake_file, reader): """Test get the longitudes.""" - 
dataset_id = {'name': 'mws_lon'} - dataset_info = {'file_key': 'data/navigation_data/mws_lon'} + dataset_id = {"name": "mws_lon"} + dataset_info = {"file_key": "data/navigation_data/mws_lon"} dataset = reader.get_dataset(dataset_id, dataset_info) @@ -291,8 +291,8 @@ def test_get_navigation_longitudes(self, caplog, fake_file, reader): def test_get_dataset_logs_debug_message(self, caplog, fake_file, reader): """Test get dataset return none if data does not exist.""" - dataset_id = {'name': 'mws_lon'} - dataset_info = {'file_key': 'data/navigation_data/mws_lon'} + dataset_id = {"name": "mws_lon"} + dataset_info = {"file_key": "data/navigation_data/mws_lon"} with caplog.at_level(logging.DEBUG): _ = reader.get_dataset(dataset_id, dataset_info) @@ -302,8 +302,8 @@ def test_get_dataset_logs_debug_message(self, caplog, fake_file, reader): def test_get_dataset_aux_data_not_supported(self, reader): """Test get auxillary dataset not supported.""" - dataset_id = {'name': 'scantime_utc'} - dataset_info = {'file_key': 'non/existing'} + dataset_id = {"name": "scantime_utc"} + dataset_info = {"file_key": "non/existing"} with pytest.raises(NotImplementedError) as exec_info: _ = reader.get_dataset(dataset_id, dataset_info) @@ -312,8 +312,8 @@ def test_get_dataset_aux_data_not_supported(self, reader): def test_get_dataset_aux_data_expected_data_missing(self, caplog, reader): """Test get auxillary dataset which is not present but supposed to be in file.""" - dataset_id = {'name': 'surface_type'} - dataset_info = {'file_key': 'non/existing'} + dataset_id = {"name": "surface_type"} + dataset_info = {"file_key": "non/existing"} with caplog.at_level(logging.ERROR): with pytest.raises(KeyError) as exec_info: @@ -325,10 +325,10 @@ def test_get_dataset_aux_data_expected_data_missing(self, caplog, reader): " no valid Dataset created") assert log_output in caplog.text - @pytest.mark.parametrize('dims', ( - ('n_scans', 'n_fovs'), - ('x', 'y'), - )) + @pytest.mark.parametrize("dims", [ + ("n_scans", "n_fovs"), + ("x", "y"), + ]) def test_standardize_dims(self, reader, dims): """Test standardize dims.""" variable = xr.DataArray( @@ -336,7 +336,7 @@ def test_standardize_dims(self, reader, dims): dims=dims, ) standardized = reader._standardize_dims(variable) - assert standardized.dims == ('y', 'x') + assert standardized.dims == ("y", "x") @staticmethod def test_drop_coords(reader): @@ -344,7 +344,7 @@ def test_drop_coords(reader): coords = "dummy" data = xr.DataArray( np.ones(10), - dims=('y'), + dims=("y"), coords={coords: 0}, ) assert coords in data.coords @@ -355,22 +355,22 @@ def test_get_global_attributes(self, reader): """Test get global attributes.""" attributes = reader._get_global_attributes() assert attributes == { - 'filename': reader.filename, - 'start_time': datetime(2000, 1, 2, 3, 4, 5), - 'end_time': datetime(2000, 1, 2, 4, 5, 6), - 'spacecraft_name': 'Metop-SG-A1', - 'sensor': 'MWS', - 'filename_start_time': datetime(2000, 1, 1, 1, 0), - 'filename_end_time': datetime(2000, 1, 1, 2, 0), - 'platform_name': 'Metop-SG-A1', - 'quality_group': { - 'duration_of_product': np.array(5944., dtype=np.float32), - 'overall_quality_flag': 0, + "filename": reader.filename, + "start_time": datetime(2000, 1, 2, 3, 4, 5), + "end_time": datetime(2000, 1, 2, 4, 5, 6), + "spacecraft_name": "Metop-SG-A1", + "sensor": "MWS", + "filename_start_time": datetime(2000, 1, 1, 1, 0), + "filename_end_time": datetime(2000, 1, 1, 2, 0), + "platform_name": "Metop-SG-A1", + "quality_group": { + "duration_of_product": np.array(5944., 
dtype=np.float32), + "overall_quality_flag": 0, } } @patch( - 'satpy.readers.mws_l1b.MWSL1BFile._get_global_attributes', + "satpy.readers.mws_l1b.MWSL1BFile._get_global_attributes", return_value={"mocked_global_attributes": True}, ) def test_manage_attributes(self, mock, reader): @@ -379,17 +379,17 @@ def test_manage_attributes(self, mock, reader): np.ones(N_SCANS), attrs={"season": "summer"}, ) - dataset_info = {'name': '1', 'units': 'K'} + dataset_info = {"name": "1", "units": "K"} variable = reader._manage_attributes(variable, dataset_info) assert variable.attrs == { - 'season': 'summer', - 'units': 'K', - 'name': '1', - 'mocked_global_attributes': True, + "season": "summer", + "units": "K", + "name": "1", + "mocked_global_attributes": True, } -@pytest.mark.parametrize("name, index", [('1', 0), ('2', 1), ('24', 23)]) +@pytest.mark.parametrize(("name", "index"), [("1", 0), ("2", 1), ("24", 23)]) def test_get_channel_index_from_name(name, index): """Test getting the MWS channel index from the channel name.""" ch_idx = get_channel_index_from_name(name) @@ -398,8 +398,5 @@ def test_get_channel_index_from_name(name, index): def test_get_channel_index_from_name_throw_exception(): """Test that an excpetion is thrown when getting the MWS channel index from an unsupported name.""" - with pytest.raises(Exception) as excinfo: - _ = get_channel_index_from_name('channel 1') - - assert str(excinfo.value) == "Channel name 'channel 1' not supported" - assert excinfo.type == AttributeError + with pytest.raises(AttributeError, match="Channel name 'channel 1' not supported"): + _ = get_channel_index_from_name("channel 1") diff --git a/satpy/tests/reader_tests/test_netcdf_utils.py b/satpy/tests/reader_tests/test_netcdf_utils.py index 8570d8bd34..ea104ed086 100644 --- a/satpy/tests/reader_tests/test_netcdf_utils.py +++ b/satpy/tests/reader_tests/test_netcdf_utils.py @@ -76,26 +76,26 @@ class TestNetCDF4FileHandler(unittest.TestCase): def setUp(self): """Create a test NetCDF4 file.""" from netCDF4 import Dataset - with Dataset('test.nc', 'w') as nc: + with Dataset("test.nc", "w") as nc: # Create dimensions - nc.createDimension('rows', 10) - nc.createDimension('cols', 100) + nc.createDimension("rows", 10) + nc.createDimension("cols", 100) # Create Group - g1 = nc.createGroup('test_group') + g1 = nc.createGroup("test_group") # Add datasets - ds1_f = g1.createVariable('ds1_f', np.float32, - dimensions=('rows', 'cols')) + ds1_f = g1.createVariable("ds1_f", np.float32, + dimensions=("rows", "cols")) ds1_f[:] = np.arange(10. * 100).reshape((10, 100)) - ds1_i = g1.createVariable('ds1_i', np.int32, - dimensions=('rows', 'cols')) + ds1_i = g1.createVariable("ds1_i", np.int32, + dimensions=("rows", "cols")) ds1_i[:] = np.arange(10 * 100).reshape((10, 100)) - ds2_f = nc.createVariable('ds2_f', np.float32, - dimensions=('rows', 'cols')) + ds2_f = nc.createVariable("ds2_f", np.float32, + dimensions=("rows", "cols")) ds2_f[:] = np.arange(10. 
* 100).reshape((10, 100)) - ds2_i = nc.createVariable('ds2_i', np.int32, - dimensions=('rows', 'cols')) + ds2_i = nc.createVariable("ds2_i", np.int32, + dimensions=("rows", "cols")) ds2_i[:] = np.arange(10 * 100).reshape((10, 100)) ds2_s = nc.createVariable("ds2_s", np.int8, dimensions=("rows",)) @@ -104,119 +104,118 @@ def setUp(self): ds2_sc[:] = 42 # Add attributes - nc.test_attr_str = 'test_string' + nc.test_attr_str = "test_string" nc.test_attr_int = 0 nc.test_attr_float = 1.2 nc.test_attr_str_arr = np.array(b"test_string2") - g1.test_attr_str = 'test_string' + g1.test_attr_str = "test_string" g1.test_attr_int = 0 g1.test_attr_float = 1.2 for d in [ds1_f, ds1_i, ds2_f, ds2_i]: - d.test_attr_str = 'test_string' + d.test_attr_str = "test_string" d.test_attr_int = 0 d.test_attr_float = 1.2 def tearDown(self): """Remove the previously created test file.""" - os.remove('test.nc') + os.remove("test.nc") def test_all_basic(self): """Test everything about the NetCDF4 class.""" import xarray as xr from satpy.readers.netcdf_utils import NetCDF4FileHandler - file_handler = NetCDF4FileHandler('test.nc', {}, {}) + file_handler = NetCDF4FileHandler("test.nc", {}, {}) - self.assertEqual(file_handler['/dimension/rows'], 10) - self.assertEqual(file_handler['/dimension/cols'], 100) + assert file_handler["/dimension/rows"] == 10 + assert file_handler["/dimension/cols"] == 100 - for ds in ('test_group/ds1_f', 'test_group/ds1_i', 'ds2_f', 'ds2_i'): - self.assertEqual(file_handler[ds].dtype, np.float32 if ds.endswith('f') else np.int32) - self.assertTupleEqual(file_handler[ds + '/shape'], (10, 100)) - self.assertEqual(file_handler[ds + '/dimensions'], ("rows", "cols")) - self.assertEqual(file_handler[ds + '/attr/test_attr_str'], 'test_string') - self.assertEqual(file_handler[ds + '/attr/test_attr_int'], 0) - self.assertEqual(file_handler[ds + '/attr/test_attr_float'], 1.2) + for ds in ("test_group/ds1_f", "test_group/ds1_i", "ds2_f", "ds2_i"): + assert file_handler[ds].dtype == (np.float32 if ds.endswith("f") else np.int32) + assert file_handler[ds + "/shape"] == (10, 100) + assert file_handler[ds + "/dimensions"] == ("rows", "cols") + assert file_handler[ds + "/attr/test_attr_str"] == "test_string" + assert file_handler[ds + "/attr/test_attr_int"] == 0 + assert file_handler[ds + "/attr/test_attr_float"] == 1.2 - test_group = file_handler['test_group'] - self.assertTupleEqual(test_group['ds1_i'].shape, (10, 100)) - self.assertTupleEqual(test_group['ds1_i'].dims, ('rows', 'cols')) + test_group = file_handler["test_group"] + assert test_group["ds1_i"].shape == (10, 100) + assert test_group["ds1_i"].dims == ("rows", "cols") - self.assertEqual(file_handler['/attr/test_attr_str'], 'test_string') - self.assertEqual(file_handler['/attr/test_attr_str_arr'], 'test_string2') - self.assertEqual(file_handler['/attr/test_attr_int'], 0) - self.assertEqual(file_handler['/attr/test_attr_float'], 1.2) + assert file_handler["/attr/test_attr_str"] == "test_string" + assert file_handler["/attr/test_attr_str_arr"] == "test_string2" + assert file_handler["/attr/test_attr_int"] == 0 + assert file_handler["/attr/test_attr_float"] == 1.2 global_attrs = { - 'test_attr_str': 'test_string', - 'test_attr_str_arr': 'test_string2', - 'test_attr_int': 0, - 'test_attr_float': 1.2 + "test_attr_str": "test_string", + "test_attr_str_arr": "test_string2", + "test_attr_int": 0, + "test_attr_float": 1.2 } - self.assertEqual(file_handler['/attrs'], global_attrs) + assert file_handler["/attrs"] == global_attrs - 
self.assertIsInstance(file_handler.get('ds2_f')[:], xr.DataArray) - self.assertIsNone(file_handler.get('fake_ds')) - self.assertEqual(file_handler.get('fake_ds', 'test'), 'test') + assert isinstance(file_handler.get("ds2_f")[:], xr.DataArray) + assert file_handler.get("fake_ds") is None + assert file_handler.get("fake_ds", "test") == "test" - self.assertTrue('ds2_f' in file_handler) - self.assertFalse('fake_ds' in file_handler) - self.assertIsNone(file_handler.file_handle) - self.assertEqual(file_handler["ds2_sc"], 42) + assert ("ds2_f" in file_handler) is True + assert ("fake_ds" in file_handler) is False + assert file_handler.file_handle is None + assert file_handler["ds2_sc"] == 42 def test_listed_variables(self): """Test that only listed variables/attributes area collected.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler filetype_info = { - 'required_netcdf_variables': [ - 'test_group/attr/test_attr_str', - 'attr/test_attr_str', + "required_netcdf_variables": [ + "test_group/attr/test_attr_str", + "attr/test_attr_str", ] } - file_handler = NetCDF4FileHandler('test.nc', {}, filetype_info) + file_handler = NetCDF4FileHandler("test.nc", {}, filetype_info) assert len(file_handler.file_content) == 2 - assert 'test_group/attr/test_attr_str' in file_handler.file_content - assert 'attr/test_attr_str' in file_handler.file_content + assert "test_group/attr/test_attr_str" in file_handler.file_content + assert "attr/test_attr_str" in file_handler.file_content def test_listed_variables_with_composing(self): """Test that composing for listed variables is performed.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler filetype_info = { - 'required_netcdf_variables': [ - 'test_group/{some_parameter}/attr/test_attr_str', - 'test_group/attr/test_attr_str', + "required_netcdf_variables": [ + "test_group/{some_parameter}/attr/test_attr_str", + "test_group/attr/test_attr_str", ], - 'variable_name_replacements': { - 'some_parameter': [ - 'ds1_f', - 'ds1_i', + "variable_name_replacements": { + "some_parameter": [ + "ds1_f", + "ds1_i", ], - 'another_parameter': [ - 'not_used' + "another_parameter": [ + "not_used" ], } } - file_handler = NetCDF4FileHandler('test.nc', {}, filetype_info) + file_handler = NetCDF4FileHandler("test.nc", {}, filetype_info) assert len(file_handler.file_content) == 3 - assert 'test_group/ds1_f/attr/test_attr_str' in file_handler.file_content - assert 'test_group/ds1_i/attr/test_attr_str' in file_handler.file_content - assert not any('not_used' in var for var in file_handler.file_content) - assert not any('some_parameter' in var for var in file_handler.file_content) - assert not any('another_parameter' in var for var in file_handler.file_content) - assert 'test_group/attr/test_attr_str' in file_handler.file_content + assert "test_group/ds1_f/attr/test_attr_str" in file_handler.file_content + assert "test_group/ds1_i/attr/test_attr_str" in file_handler.file_content + assert not any("not_used" in var for var in file_handler.file_content) + assert not any("some_parameter" in var for var in file_handler.file_content) + assert not any("another_parameter" in var for var in file_handler.file_content) + assert "test_group/attr/test_attr_str" in file_handler.file_content def test_caching(self): """Test that caching works as intended.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler h = NetCDF4FileHandler("test.nc", {}, {}, cache_var_size=1000, cache_handle=True) - self.assertIsNotNone(h.file_handle) - self.assertTrue(h.file_handle.isopen()) + assert 
h.file_handle is not None + assert h.file_handle.isopen() - self.assertEqual(sorted(h.cached_file_content.keys()), - ["ds2_s", "ds2_sc"]) + assert sorted(h.cached_file_content.keys()) == ["ds2_s", "ds2_sc"] # with caching, these tests access different lines than without np.testing.assert_array_equal(h["ds2_s"], np.arange(10)) np.testing.assert_array_equal(h["test_group/ds1_i"], @@ -227,7 +226,7 @@ def test_caching(self): h["ds2_f"], np.arange(10. * 100).reshape((10, 100))) h.__del__() - self.assertFalse(h.file_handle.isopen()) + assert not h.file_handle.isopen() def test_filenotfound(self): """Test that error is raised when file not found.""" @@ -241,21 +240,21 @@ def test_get_and_cache_npxr_is_xr(self): import xarray as xr from satpy.readers.netcdf_utils import NetCDF4FileHandler - file_handler = NetCDF4FileHandler('test.nc', {}, {}, cache_handle=True) + file_handler = NetCDF4FileHandler("test.nc", {}, {}, cache_handle=True) - data = file_handler.get_and_cache_npxr('test_group/ds1_f') + data = file_handler.get_and_cache_npxr("test_group/ds1_f") assert isinstance(data, xr.DataArray) def test_get_and_cache_npxr_data_is_cached(self): """Test that the data are cached when get_and_cache_npxr() is called.""" from satpy.readers.netcdf_utils import NetCDF4FileHandler - file_handler = NetCDF4FileHandler('test.nc', {}, {}, cache_handle=True) - data = file_handler.get_and_cache_npxr('test_group/ds1_f') + file_handler = NetCDF4FileHandler("test.nc", {}, {}, cache_handle=True) + data = file_handler.get_and_cache_npxr("test_group/ds1_f") # Delete the dataset from the file content dict, it should be available from the cache del file_handler.file_content["test_group/ds1_f"] - data2 = file_handler.get_and_cache_npxr('test_group/ds1_f') + data2 = file_handler.get_and_cache_npxr("test_group/ds1_f") assert np.all(data == data2) diff --git a/satpy/tests/reader_tests/test_nucaps.py b/satpy/tests/reader_tests/test_nucaps.py index 2f7b0c97a5..a1f5736bdb 100644 --- a/satpy/tests/reader_tests/test_nucaps.py +++ b/satpy/tests/reader_tests/test_nucaps.py @@ -61,93 +61,93 @@ class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = { - '/attr/time_coverage_start': "2020-10-20T12:00:00.5Z", - '/attr/time_coverage_end': "2020-10-20T12:00:36Z", - '/attr/start_orbit_number': 1, - '/attr/end_orbit_number': 2, - '/attr/platform_name': 'NPP', - '/attr/instrument_name': 'CrIS, ATMS, VIIRS', + "/attr/time_coverage_start": "2020-10-20T12:00:00.5Z", + "/attr/time_coverage_end": "2020-10-20T12:00:36Z", + "/attr/start_orbit_number": 1, + "/attr/end_orbit_number": 2, + "/attr/platform_name": "NPP", + "/attr/instrument_name": "CrIS, ATMS, VIIRS", } for k, units, standard_name in [ - ('Solar_Zenith', 'degrees', 'solar_zenith_angle'), - ('Topography', 'meters', ''), - ('Land_Fraction', '1', ''), - ('Surface_Pressure', 'mb', ''), - ('Skin_Temperature', 'Kelvin', 'surface_temperature'), + ("Solar_Zenith", "degrees", "solar_zenith_angle"), + ("Topography", "meters", ""), + ("Land_Fraction", "1", ""), + ("Surface_Pressure", "mb", ""), + ("Skin_Temperature", "Kelvin", "surface_temperature"), ]: file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/units'] = units - file_content[k + '/attr/valid_range'] = (0., 120.) - file_content[k + '/attr/_FillValue'] = -9999. 
+ file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/units"] = units + file_content[k + "/attr/valid_range"] = (0., 120.) + file_content[k + "/attr/_FillValue"] = -9999. if standard_name: - file_content[k + '/attr/standard_name'] = standard_name + file_content[k + "/attr/standard_name"] = standard_name for k, units, standard_name in [ - ('Temperature', 'Kelvin', 'air_temperature'), - ('Effective_Pressure', 'mb', ''), - ('H2O', '1', ''), - ('H2O_MR', 'g/g', ''), - ('O3', '1', ''), - ('O3_MR', '1', ''), - ('Liquid_H2O', '1', ''), - ('Liquid_H2O_MR', 'g/g', 'cloud_liquid_water_mixing_ratio'), - ('CO', '1', ''), - ('CO_MR', '1', ''), - ('CH4', '1', ''), - ('CH4_MR', '1', ''), - ('CO2', '1', ''), - ('HNO3', '1', ''), - ('HNO3_MR', '1', ''), - ('N2O', '1', ''), - ('N2O_MR', '1', ''), - ('SO2', '1', ''), - ('SO2_MR', '1', ''), + ("Temperature", "Kelvin", "air_temperature"), + ("Effective_Pressure", "mb", ""), + ("H2O", "1", ""), + ("H2O_MR", "g/g", ""), + ("O3", "1", ""), + ("O3_MR", "1", ""), + ("Liquid_H2O", "1", ""), + ("Liquid_H2O_MR", "g/g", "cloud_liquid_water_mixing_ratio"), + ("CO", "1", ""), + ("CO_MR", "1", ""), + ("CH4", "1", ""), + ("CH4_MR", "1", ""), + ("CO2", "1", ""), + ("HNO3", "1", ""), + ("HNO3_MR", "1", ""), + ("N2O", "1", ""), + ("N2O_MR", "1", ""), + ("SO2", "1", ""), + ("SO2_MR", "1", ""), ]: file_content[k] = DEFAULT_PRES_FILE_DATA - file_content[k + '/shape'] = DEFAULT_PRES_FILE_SHAPE - file_content[k + '/attr/units'] = units - file_content[k + '/attr/valid_range'] = (0., 120.) - file_content[k + '/attr/_FillValue'] = -9999. + file_content[k + "/shape"] = DEFAULT_PRES_FILE_SHAPE + file_content[k + "/attr/units"] = units + file_content[k + "/attr/valid_range"] = (0., 120.) + file_content[k + "/attr/_FillValue"] = -9999. if standard_name: - file_content[k + '/attr/standard_name'] = standard_name - k = 'Pressure' + file_content[k + "/attr/standard_name"] = standard_name + k = "Pressure" file_content[k] = ALL_PRESSURE_LEVELS - file_content[k + '/shape'] = DEFAULT_PRES_FILE_SHAPE - file_content[k + '/attr/units'] = 'mb' - file_content[k + '/attr/valid_range'] = (0., 2000.) - file_content[k + '/attr/_FillValue'] = -9999. + file_content[k + "/shape"] = DEFAULT_PRES_FILE_SHAPE + file_content[k + "/attr/units"] = "mb" + file_content[k + "/attr/valid_range"] = (0., 2000.) + file_content[k + "/attr/_FillValue"] = -9999. - k = 'Quality_Flag' + k = "Quality_Flag" file_content[k] = DEFAULT_FILE_DATA.astype(np.int32) - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/valid_range'] = (0, 31) - file_content[k + '/attr/_FillValue'] = -9999. + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/valid_range"] = (0, 31) + file_content[k + "/attr/_FillValue"] = -9999. - k = 'Longitude' + k = "Longitude" file_content[k] = DEFAULT_LON_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/units'] = 'degrees_east' - file_content[k + '/attr/valid_range'] = (-180., 180.) - file_content[k + '/attr/standard_name'] = 'longitude' - file_content[k + '/attr/_FillValue'] = -9999. + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/units"] = "degrees_east" + file_content[k + "/attr/valid_range"] = (-180., 180.) + file_content[k + "/attr/standard_name"] = "longitude" + file_content[k + "/attr/_FillValue"] = -9999. 
- k = 'Latitude' + k = "Latitude" file_content[k] = DEFAULT_LAT_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/units'] = 'degrees_north' - file_content[k + '/attr/valid_range'] = (-90., 90.) - file_content[k + '/attr/standard_name'] = 'latitude' - file_content[k + '/attr/_FillValue'] = -9999. - - attrs = ('_FillValue', 'flag_meanings', 'flag_values', 'units') - cris_fors_dim_name = 'Number_of_CrIS_FORs' - pressure_levels_dim_name = 'Number_of_P_Levels' - if ('_v1' in filename): - cris_fors_dim_name = 'number_of_FORs' - pressure_levels_dim_name = 'number_of_p_levels' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/units"] = "degrees_north" + file_content[k + "/attr/valid_range"] = (-90., 90.) + file_content[k + "/attr/standard_name"] = "latitude" + file_content[k + "/attr/_FillValue"] = -9999. + + attrs = ("_FillValue", "flag_meanings", "flag_values", "units") + cris_fors_dim_name = "Number_of_CrIS_FORs" + pressure_levels_dim_name = "Number_of_P_Levels" + if ("_v1" in filename): + cris_fors_dim_name = "number_of_FORs" + pressure_levels_dim_name = "number_of_p_levels" convert_file_content_to_data_array( file_content, attrs=attrs, - dims=('z', cris_fors_dim_name, pressure_levels_dim_name)) + dims=("z", cris_fors_dim_name, pressure_levels_dim_name)) return file_content @@ -160,9 +160,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.nucaps import NUCAPSFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(NUCAPSFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) + self.p = mock.patch.object(NUCAPSFileHandler, "__bases__", (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -175,197 +175,194 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_init_with_kwargs(self): """Test basic init with extra parameters.""" from satpy.readers import load_reader r = load_reader(self.reader_configs, mask_surface=False) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) - self.assertEqual(len(loadables), 1) - r.create_filehandlers(loadables, fh_kwargs={'mask_surface': False}) + assert len(loadables) == 1 + r.create_filehandlers(loadables, fh_kwargs={"mask_surface": False}) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_nonpressure_based(self): """Test loading all channels that aren't based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + 
"NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Solar_Zenith', - 'Topography', - 'Land_Fraction', - 'Surface_Pressure', - 'Skin_Temperature', - 'Quality_Flag', + datasets = r.load(["Solar_Zenith", + "Topography", + "Land_Fraction", + "Surface_Pressure", + "Skin_Temperature", + "Quality_Flag", ]) - self.assertEqual(len(datasets), 6) + assert len(datasets) == 6 for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) # self.assertEqual(v.info['units'], 'degrees') - self.assertEqual(v.ndim, 1) - self.assertEqual(v.attrs['sensor'], set(['cris', 'atms', 'viirs'])) - self.assertEqual(type(v.attrs['start_time']), datetime.datetime) - self.assertEqual(type(v.attrs['end_time']), datetime.datetime) + assert v.ndim == 1 + assert v.attrs["sensor"] == set(["cris", "atms", "viirs"]) + assert type(v.attrs["start_time"]) == datetime.datetime + assert type(v.attrs["end_time"]) == datetime.datetime def test_load_pressure_based(self): """Test loading all channels based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature', - 'Effective_Pressure', - 'H2O', - 'H2O_MR', - 'O3', - 'O3_MR', - 'Liquid_H2O', - 'Liquid_H2O_MR', - 'CO', - 'CO_MR', - 'CH4', - 'CH4_MR', - 'CO2', - 'HNO3', - 'HNO3_MR', - 'N2O', - 'N2O_MR', - 'SO2', - 'SO2_MR', + datasets = r.load(["Temperature", + "Effective_Pressure", + "H2O", + "H2O_MR", + "O3", + "O3_MR", + "Liquid_H2O", + "Liquid_H2O_MR", + "CO", + "CO_MR", + "CH4", + "CH4_MR", + "CO2", + "HNO3", + "HNO3_MR", + "N2O", + "N2O_MR", + "SO2", + "SO2_MR", ]) - self.assertEqual(len(datasets), 19) + assert len(datasets) == 19 for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) - self.assertEqual(v.ndim, 2) + assert v.ndim == 2 if np.issubdtype(v.dtype, np.floating): - assert '_FillValue' not in v.attrs + assert "_FillValue" not in v.attrs def test_load_multiple_files_pressure(self): """Test loading Temperature from multiple input files.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', - 'NUCAPS-EDR_v1r0_npp_s201603011159009_e201603011159307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", + "NUCAPS-EDR_v1r0_npp_s201603011159009_e201603011159307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=True) - self.assertEqual(len(datasets), 100) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) + assert len(datasets) == 100 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_individual_pressure_levels_true(self): """Test loading Temperature with individual pressure datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = 
r.load(r.pressure_dataset_names['Temperature'], pressure_levels=True) - self.assertEqual(len(datasets), 100) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) + assert len(datasets) == 100 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_individual_pressure_levels_min_max(self): """Test loading individual Temperature with min/max level specified.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(100., 150.)) - self.assertEqual(len(datasets), 6) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(100., 150.)) + assert len(datasets) == 6 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_individual_pressure_levels_single(self): """Test loading individual Temperature with specific levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 1) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(103.017,)) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_pressure_levels_true(self): """Test loading Temperature with all pressure levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=True) - self.assertEqual(len(datasets), 1) + datasets = r.load(["Temperature"], pressure_levels=True) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, DEFAULT_PRES_FILE_SHAPE) + assert v.ndim == 2 + assert v.shape == DEFAULT_PRES_FILE_SHAPE def test_load_pressure_levels_min_max(self): """Test loading Temperature with min/max level specified.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=(100., 150.)) - self.assertEqual(len(datasets), 1) + datasets = r.load(["Temperature"], pressure_levels=(100., 150.)) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 6)) + assert v.ndim == 2 + assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 6) def test_load_pressure_levels_single(self): """Test loading a specific Temperature level.""" from satpy.readers import load_reader r = 
load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 1) + datasets = r.load(["Temperature"], pressure_levels=(103.017,)) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 1)) + assert v.ndim == 2 + assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) def test_load_pressure_levels_single_and_pressure_levels(self): """Test loading a specific Temperature level and pressure levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc', + "NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature', 'Pressure_Levels'], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 2) - t_ds = datasets['Temperature'] - self.assertEqual(t_ds.ndim, 2) - self.assertTupleEqual(t_ds.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 1)) - pl_ds = datasets['Pressure_Levels'] - self.assertTupleEqual(pl_ds.shape, (1,)) + datasets = r.load(["Temperature", "Pressure_Levels"], pressure_levels=(103.017,)) + assert len(datasets) == 2 + t_ds = datasets["Temperature"] + assert t_ds.ndim == 2 + assert t_ds.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) + pl_ds = datasets["Pressure_Levels"] + assert pl_ds.shape == (1,) class TestNUCAPSScienceEDRReader(unittest.TestCase): @@ -377,9 +374,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.nucaps import NUCAPSFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(NUCAPSFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) + self.p = mock.patch.object(NUCAPSFileHandler, "__bases__", (FakeNetCDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -392,160 +389,157 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_nonpressure_based(self): """Test loading all channels that aren't based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Topography', - 'Land_Fraction', - 'Surface_Pressure', - 'Skin_Temperature', - 'Quality_Flag', + datasets = r.load(["Topography", + "Land_Fraction", + 
"Surface_Pressure", + "Skin_Temperature", + "Quality_Flag", ]) - self.assertEqual(len(datasets), 5) + assert len(datasets) == 5 for v in datasets.values(): - self.assertEqual(v.ndim, 1) - self.assertEqual(v.attrs['sensor'], set(['cris', 'atms', 'viirs'])) - self.assertEqual(type(v.attrs['start_time']), datetime.datetime) - self.assertEqual(type(v.attrs['end_time']), datetime.datetime) + assert v.ndim == 1 + assert v.attrs["sensor"] == set(["cris", "atms", "viirs"]) + assert type(v.attrs["start_time"]) == datetime.datetime + assert type(v.attrs["end_time"]) == datetime.datetime def test_load_pressure_based(self): """Test loading all channels based on pressure.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature', - 'H2O', - 'H2O_MR', - 'O3', - 'O3_MR', - 'CO', - 'CO_MR', - 'CH4', - 'CH4_MR', - 'CO2', - 'HNO3', - 'HNO3_MR', - 'N2O', - 'N2O_MR', - 'SO2', - 'SO2_MR', + datasets = r.load(["Temperature", + "H2O", + "H2O_MR", + "O3", + "O3_MR", + "CO", + "CO_MR", + "CH4", + "CH4_MR", + "CO2", + "HNO3", + "HNO3_MR", + "N2O", + "N2O_MR", + "SO2", + "SO2_MR", ]) - self.assertEqual(len(datasets), 16) + assert len(datasets) == 16 for v in datasets.values(): # self.assertNotEqual(v.info['resolution'], 0) - self.assertEqual(v.ndim, 2) + assert v.ndim == 2 def test_load_individual_pressure_levels_true(self): """Test loading Temperature with individual pressure datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=True) - self.assertEqual(len(datasets), 100) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=True) + assert len(datasets) == 100 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_individual_pressure_levels_min_max(self): """Test loading individual Temperature with min/max level specified.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(100., 150.)) - self.assertEqual(len(datasets), 6) + datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(100., 150.)) + assert len(datasets) == 6 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_individual_pressure_levels_single(self): """Test loading individual Temperature with specific levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 1) + 
datasets = r.load(r.pressure_dataset_names["Temperature"], pressure_levels=(103.017,)) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 1) + assert v.ndim == 1 def test_load_pressure_levels_true(self): """Test loading Temperature with all pressure levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=True) - self.assertEqual(len(datasets), 1) + datasets = r.load(["Temperature"], pressure_levels=True) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, DEFAULT_PRES_FILE_SHAPE) + assert v.ndim == 2 + assert v.shape == DEFAULT_PRES_FILE_SHAPE def test_load_pressure_levels_min_max(self): """Test loading Temperature with min/max level specified.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=(100., 150.)) - self.assertEqual(len(datasets), 1) + datasets = r.load(["Temperature"], pressure_levels=(100., 150.)) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 6)) + assert v.ndim == 2 + assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 6) def test_load_pressure_levels_single(self): """Test loading a specific Temperature level.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature'], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 1) + datasets = r.load(["Temperature"], pressure_levels=(103.017,)) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.ndim, 2) - self.assertTupleEqual(v.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 1)) + assert v.ndim == 2 + assert v.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) def test_load_pressure_levels_single_and_pressure_levels(self): """Test loading a specific Temperature level and pressure levels.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc', + "NUCAPS-sciEDR_am_npp_s20190703223319_e20190703223349_STC_fsr.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['Temperature', 'Pressure_Levels'], pressure_levels=(103.017,)) - self.assertEqual(len(datasets), 2) - t_ds = datasets['Temperature'] - self.assertEqual(t_ds.ndim, 2) - self.assertTupleEqual(t_ds.shape, - (DEFAULT_PRES_FILE_SHAPE[0], 1)) - pl_ds = datasets['Pressure_Levels'] - self.assertTupleEqual(pl_ds.shape, (1,)) + datasets = r.load(["Temperature", "Pressure_Levels"], pressure_levels=(103.017,)) + assert len(datasets) == 2 + t_ds = datasets["Temperature"] + assert t_ds.ndim == 2 + assert t_ds.shape == (DEFAULT_PRES_FILE_SHAPE[0], 1) + pl_ds = datasets["Pressure_Levels"] 
+ assert pl_ds.shape == (1,) diff --git a/satpy/tests/reader_tests/test_nwcsaf_msg.py b/satpy/tests/reader_tests/test_nwcsaf_msg.py index 5e3053058e..6d4dbfe53f 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_msg.py +++ b/satpy/tests/reader_tests/test_nwcsaf_msg.py @@ -23,6 +23,7 @@ import h5py import numpy as np +import pytest from satpy.tests.reader_tests.utils import fill_h5 @@ -428,9 +429,9 @@ } AREA_DEF_DICT = { - "proj_dict": {'proj': 'geos', 'lon_0': 0, 'h': 35785831, 'x_0': 0, 'y_0': 0, - 'a': 6378169, 'b': 6356583.8, 'units': 'm', 'no_defs': None, 'type': 'crs'}, - "area_id": 'MSG-N', + "proj_dict": {"proj": "geos", "lon_0": 0, "h": 35785831, "x_0": 0, "y_0": 0, + "a": 6378169, "b": 6356583.8, "units": "m", "no_defs": None, "type": "crs"}, + "area_id": "MSG-N", "x_size": 3712, "y_size": 1856, "area_extent": (-5570248.2825, 1501.0099, 5567247.8793, 5570247.8784) @@ -481,20 +482,20 @@ def test_get_area_def(self): area_def = test.get_area_def(dsid) - aext_res = AREA_DEF_DICT['area_extent'] + aext_res = AREA_DEF_DICT["area_extent"] for i in range(4): - self.assertAlmostEqual(area_def.area_extent[i], aext_res[i], 4) + assert area_def.area_extent[i] == pytest.approx(aext_res[i], abs=1e-4) - proj_dict = AREA_DEF_DICT['proj_dict'] - self.assertEqual(proj_dict['proj'], area_def.proj_dict['proj']) + proj_dict = AREA_DEF_DICT["proj_dict"] + assert proj_dict["proj"] == area_def.proj_dict["proj"] # Not all elements passed on Appveyor, so skip testing every single element of the proj-dict: # for key in proj_dict: # self.assertEqual(proj_dict[key], area_def.proj_dict[key]) - self.assertEqual(AREA_DEF_DICT['x_size'], area_def.width) - self.assertEqual(AREA_DEF_DICT['y_size'], area_def.height) + assert AREA_DEF_DICT["x_size"] == area_def.width + assert AREA_DEF_DICT["y_size"] == area_def.height - self.assertEqual(AREA_DEF_DICT['area_id'], area_def.area_id) + assert AREA_DEF_DICT["area_id"] == area_def.area_id def test_get_dataset(self): """Retrieve datasets from a NWCSAF msgv2013 hdf5 file.""" @@ -506,8 +507,8 @@ def test_get_dataset(self): dsid = make_dataid(name="ct") test = Hdf5NWCSAF(self.filename_ct, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CT"}) - self.assertEqual(ds.shape, (1856, 3712)) - self.assertEqual(ds.dtype, np.uint8) + assert ds.shape == (1856, 3712) + assert ds.dtype == np.uint8 np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTYPE_TEST_FRAME) filename_info = {} @@ -515,8 +516,8 @@ def test_get_dataset(self): dsid = make_dataid(name="ctth_alti") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_HEIGHT"}) - self.assertEqual(ds.shape, (1856, 3712)) - self.assertEqual(ds.dtype, np.float32) + assert ds.shape == (1856, 3712) + assert ds.dtype == np.float32 np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_HEIGHT_TEST_FRAME_RES) filename_info = {} @@ -524,8 +525,8 @@ def test_get_dataset(self): dsid = make_dataid(name="ctth_pres") test = Hdf5NWCSAF(self.filename_ctth, filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_PRESS"}) - self.assertEqual(ds.shape, (1856, 3712)) - self.assertEqual(ds.dtype, np.float32) + assert ds.shape == (1856, 3712) + assert ds.dtype == np.float32 np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_PRESSURE_TEST_FRAME_RES) filename_info = {} @@ -533,8 +534,8 @@ def test_get_dataset(self): dsid = make_dataid(name="ctth_tempe") test = Hdf5NWCSAF(self.filename_ctth, 
filename_info, filetype_info) ds = test.get_dataset(dsid, {"file_key": "CTTH_TEMPER"}) - self.assertEqual(ds.shape, (1856, 3712)) - self.assertEqual(ds.dtype, np.float32) + assert ds.shape == (1856, 3712) + assert ds.dtype == np.float32 np.testing.assert_allclose(ds.data[1000:1010, 1000:1010].compute(), CTTH_TEMPERATURE_TEST_FRAME_RES) def tearDown(self): diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py index 095533d959..2070e5187c 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_nc.py +++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py @@ -23,19 +23,19 @@ from satpy.readers.nwcsaf_nc import NcNWCSAF, read_nwcsaf_time -PROJ_KM = {'gdal_projection': '+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000', - 'gdal_xgeo_up_left': -5569500.0, - 'gdal_ygeo_up_left': 5437500.0, - 'gdal_xgeo_low_right': 5566500.0, - 'gdal_ygeo_low_right': 2653500.0} +PROJ_KM = {"gdal_projection": "+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000", + "gdal_xgeo_up_left": -5569500.0, + "gdal_ygeo_up_left": 5437500.0, + "gdal_xgeo_low_right": 5566500.0, + "gdal_ygeo_low_right": 2653500.0} NOMINAL_ALTITUDE = 35785863.0 -PROJ = {'gdal_projection': f'+proj=geos +a=6378137.000 +b=6356752.300 +lon_0=0.000000 +h={NOMINAL_ALTITUDE:.3f}', - 'gdal_xgeo_up_left': -5569500.0, - 'gdal_ygeo_up_left': 5437500.0, - 'gdal_xgeo_low_right': 5566500.0, - 'gdal_ygeo_low_right': 2653500.0} +PROJ = {"gdal_projection": f"+proj=geos +a=6378137.000 +b=6356752.300 +lon_0=0.000000 +h={NOMINAL_ALTITUDE:.3f}", + "gdal_xgeo_up_left": -5569500.0, + "gdal_ygeo_up_left": 5437500.0, + "gdal_xgeo_low_right": 5566500.0, + "gdal_ygeo_low_right": 2653500.0} dimensions = {"nx": 1530, @@ -105,7 +105,7 @@ def create_nwcsaf_geo_ct_file(directory, attrs=global_attrs): return filename -@pytest.fixture +@pytest.fixture() def nwcsaf_geo_ct_filehandler(nwcsaf_geo_ct_filename): """Create a CT filehandler.""" return NcNWCSAF(nwcsaf_geo_ct_filename, {}, {}) @@ -156,13 +156,13 @@ def create_ctth_file(path, attrs=global_attrs): return filename -@pytest.fixture +@pytest.fixture() def nwcsaf_pps_cmic_filehandler(nwcsaf_pps_cmic_filename): """Create a CMIC filehandler.""" return NcNWCSAF(nwcsaf_pps_cmic_filename, {}, {"file_key_prefix": "cmic_"}) -@pytest.fixture +@pytest.fixture() def nwcsaf_pps_ctth_filehandler(nwcsaf_pps_ctth_filename): """Create a CMIC filehandler.""" return NcNWCSAF(nwcsaf_pps_ctth_filename, {}, {}) @@ -218,7 +218,7 @@ def create_ctth_alti_pal_variable_with_fill_value_color(nc_file, var_name): var.attrs["_FillValue"] = 65535 -@pytest.fixture +@pytest.fixture() def nwcsaf_pps_cpp_filehandler(nwcsaf_pps_cpp_filename): """Create a CPP filehandler.""" return NcNWCSAF(nwcsaf_pps_cpp_filename, {}, {"file_key_prefix": "cpp_"}) @@ -233,7 +233,7 @@ def nwcsaf_old_geo_ct_filename(tmp_path_factory): return create_nwcsaf_geo_ct_file(tmp_path_factory.mktemp("data-old"), attrs=attrs) -@pytest.fixture +@pytest.fixture() def nwcsaf_old_geo_ct_filehandler(nwcsaf_old_geo_ct_filename): """Create a CT filehandler.""" return NcNWCSAF(nwcsaf_old_geo_ct_filename, {}, {}) @@ -242,19 +242,19 @@ def nwcsaf_old_geo_ct_filehandler(nwcsaf_old_geo_ct_filename): class TestNcNWCSAFGeo: """Test the NcNWCSAF reader for Geo products.""" - @pytest.mark.parametrize("platform, instrument", [("Metop-B", "avhrr-3"), - ("NOAA-20", "viirs"), - ("Himawari-8", "ahi"), - ("GOES-17", "abi"), - ("Meteosat-11", "seviri")]) + @pytest.mark.parametrize(("platform", "instrument"), [("Metop-B", "avhrr-3"), + 
("NOAA-20", "viirs"), + ("Himawari-8", "ahi"), + ("GOES-17", "abi"), + ("Meteosat-11", "seviri")]) def test_sensor_name_platform(self, nwcsaf_geo_ct_filehandler, platform, instrument): """Test that the correct sensor name is being set.""" nwcsaf_geo_ct_filehandler.set_platform_and_sensor(platform_name=platform) assert nwcsaf_geo_ct_filehandler.sensor == set([instrument]) assert nwcsaf_geo_ct_filehandler.sensor_names == set([instrument]) - @pytest.mark.parametrize("platform, instrument", [("GOES16", "abi"), - ("MSG4", "seviri")]) + @pytest.mark.parametrize(("platform", "instrument"), [("GOES16", "abi"), + ("MSG4", "seviri")]) def test_sensor_name_sat_id(self, nwcsaf_geo_ct_filehandler, platform, instrument): """Test that the correct sensor name is being set.""" nwcsaf_geo_ct_filehandler.set_platform_and_sensor(sat_id=platform) @@ -263,13 +263,13 @@ def test_sensor_name_sat_id(self, nwcsaf_geo_ct_filehandler, platform, instrumen def test_get_area_def(self, nwcsaf_geo_ct_filehandler): """Test that get_area_def() returns proper area.""" - dsid = {'name': 'ct'} + dsid = {"name": "ct"} _check_area_def(nwcsaf_geo_ct_filehandler.get_area_def(dsid)) def test_get_area_def_km(self, nwcsaf_old_geo_ct_filehandler): """Test that get_area_def() returns proper area when the projection is in km.""" - dsid = {'name': 'ct'} + dsid = {"name": "ct"} _check_area_def(nwcsaf_old_geo_ct_filehandler.get_area_def(dsid)) def test_scale_dataset_attr_removal(self, nwcsaf_geo_ct_filehandler): @@ -277,58 +277,58 @@ def test_scale_dataset_attr_removal(self, nwcsaf_geo_ct_filehandler): import numpy as np import xarray as xr - attrs = {'scale_factor': np.array(10), - 'add_offset': np.array(20)} + attrs = {"scale_factor": np.array(10), + "add_offset": np.array(20)} var = xr.DataArray([1, 2, 3], attrs=attrs) - var = nwcsaf_geo_ct_filehandler.scale_dataset(var, 'dummy') + var = nwcsaf_geo_ct_filehandler.scale_dataset(var, "dummy") np.testing.assert_allclose(var, [30, 40, 50]) - assert 'scale_factor' not in var.attrs - assert 'add_offset' not in var.attrs - - @pytest.mark.parametrize("attrs, expected", [({'scale_factor': np.array(1.5), - 'add_offset': np.array(2.5), - '_FillValue': 1}, - [np.nan, 5.5, 7]), - ({'scale_factor': np.array(1.5), - 'add_offset': np.array(2.5), - 'valid_min': 1.1}, - [np.nan, 5.5, 7]), - ({'scale_factor': np.array(1.5), - 'add_offset': np.array(2.5), - 'valid_max': 2.1}, - [4, 5.5, np.nan]), - ({'scale_factor': np.array(1.5), - 'add_offset': np.array(2.5), - 'valid_range': (1.1, 2.1)}, - [np.nan, 5.5, np.nan])]) + assert "scale_factor" not in var.attrs + assert "add_offset" not in var.attrs + + @pytest.mark.parametrize(("attrs", "expected"), [({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "_FillValue": 1}, + [np.nan, 5.5, 7]), + ({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "valid_min": 1.1}, + [np.nan, 5.5, 7]), + ({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "valid_max": 2.1}, + [4, 5.5, np.nan]), + ({"scale_factor": np.array(1.5), + "add_offset": np.array(2.5), + "valid_range": (1.1, 2.1)}, + [np.nan, 5.5, np.nan])]) def test_scale_dataset_floating(self, nwcsaf_geo_ct_filehandler, attrs, expected): """Test the scaling of the dataset with floating point values.""" var = xr.DataArray([1, 2, 3], attrs=attrs) - var = nwcsaf_geo_ct_filehandler.scale_dataset(var, 'dummy') + var = nwcsaf_geo_ct_filehandler.scale_dataset(var, "dummy") np.testing.assert_allclose(var, expected) - assert 'scale_factor' not in var.attrs - assert 'add_offset' not in 
var.attrs + assert "scale_factor" not in var.attrs + assert "add_offset" not in var.attrs def test_scale_dataset_floating_nwcsaf_geo_ctth(self, nwcsaf_geo_ct_filehandler): """Test the scaling of the dataset with floating point values for CTTH NWCSAF/Geo v2016/v2018.""" - attrs = {'scale_factor': np.array(1.), - 'add_offset': np.array(-2000.), - 'valid_range': (0., 27000.)} + attrs = {"scale_factor": np.array(1.), + "add_offset": np.array(-2000.), + "valid_range": (0., 27000.)} var = xr.DataArray([1, 2, 3], attrs=attrs) - var = nwcsaf_geo_ct_filehandler.scale_dataset(var, 'dummy') + var = nwcsaf_geo_ct_filehandler.scale_dataset(var, "dummy") np.testing.assert_allclose(var, [-1999., -1998., -1997.]) - assert 'scale_factor' not in var.attrs - assert 'add_offset' not in var.attrs - np.testing.assert_equal(var.attrs['valid_range'], (-2000., 25000.)) + assert "scale_factor" not in var.attrs + assert "add_offset" not in var.attrs + np.testing.assert_equal(var.attrs["valid_range"], (-2000., 25000.)) def test_orbital_parameters_are_correct(self, nwcsaf_geo_ct_filehandler): """Test that orbital parameters are present in the dataset attributes.""" - dsid = {'name': 'ct'} + dsid = {"name": "ct"} var = nwcsaf_geo_ct_filehandler.get_dataset(dsid, {}) assert "orbital_parameters" in var.attrs - for param in var.attrs['orbital_parameters']: - assert isinstance(var.attrs['orbital_parameters'][param], (float, int)) + for param in var.attrs["orbital_parameters"]: + assert isinstance(var.attrs["orbital_parameters"][param], (float, int)) assert var.attrs["orbital_parameters"]["satellite_nominal_altitude"] == NOMINAL_ALTITUDE assert var.attrs["orbital_parameters"]["satellite_nominal_longitude"] == NOMINAL_LONGITUDE @@ -336,7 +336,7 @@ def test_orbital_parameters_are_correct(self, nwcsaf_geo_ct_filehandler): def test_times_are_in_dataset_attributes(self, nwcsaf_geo_ct_filehandler): """Check that start/end times are in the attributes of datasets.""" - dsid = {'name': 'ct'} + dsid = {"name": "ct"} var = nwcsaf_geo_ct_filehandler.get_dataset(dsid, {}) assert "start_time" in var.attrs assert "end_time" in var.attrs @@ -363,29 +363,29 @@ def test_end_time(self, nwcsaf_pps_cmic_filehandler): def test_drop_xycoords(self, nwcsaf_pps_cmic_filehandler): """Test the drop of x and y coords.""" - y_line = xr.DataArray(list(range(5)), dims=('y'), attrs={"long_name": "scan line number"}) - x_pixel = xr.DataArray(list(range(10)), dims=('x'), attrs={"long_name": "pixel number"}) + y_line = xr.DataArray(list(range(5)), dims=("y"), attrs={"long_name": "scan line number"}) + x_pixel = xr.DataArray(list(range(10)), dims=("x"), attrs={"long_name": "pixel number"}) lat = xr.DataArray(np.ones((5, 10)), - dims=('y', 'x'), - coords={'y': y_line, 'x': x_pixel}, - attrs={'name': 'lat', - 'standard_name': 'latitude'}) + dims=("y", "x"), + coords={"y": y_line, "x": x_pixel}, + attrs={"name": "lat", + "standard_name": "latitude"}) lon = xr.DataArray(np.ones((5, 10)), - dims=('y', 'x'), - coords={'y': y_line, 'x': x_pixel}, - attrs={'name': 'lon', - 'standard_name': 'longitude'}) + dims=("y", "x"), + coords={"y": y_line, "x": x_pixel}, + attrs={"name": "lon", + "standard_name": "longitude"}) data_array_in = xr.DataArray(np.ones((5, 10)), attrs={"scale_factor": np.array(0, dtype=float), "add_offset": np.array(1, dtype=float)}, - dims=('y', 'x'), - coords={'lon': lon, 'lat': lat, 'y': y_line, 'x': x_pixel}) + dims=("y", "x"), + coords={"lon": lon, "lat": lat, "y": y_line, "x": x_pixel}) data_array_out = 
nwcsaf_pps_cmic_filehandler.drop_xycoords(data_array_in) - assert 'y' not in data_array_out.coords + assert "y" not in data_array_out.coords def test_get_dataset_scales_and_offsets(self, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() returns scaled and offseted data.""" - dsid = {'name': 'cpp_cot'} + dsid = {"name": "cpp_cot"} info = dict(name="cpp_cot", file_type="nc_nwcsaf_cpp") @@ -395,7 +395,7 @@ def test_get_dataset_scales_and_offsets(self, nwcsaf_pps_cpp_filehandler): def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(self, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() returns scaled palette_meanings with another dataset as scaling source.""" - dsid = {'name': 'cpp_cot_pal'} + dsid = {"name": "cpp_cot_pal"} info = dict(name="cpp_cot_pal", file_type="nc_nwcsaf_cpp", @@ -407,7 +407,7 @@ def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(sel def test_get_palette_fill_value_color_added(self, nwcsaf_pps_ctth_filehandler): """Test that get_dataset() returns scaled palette_meanings with fill_value_color added.""" - dsid = {'name': 'ctth_alti_pal'} + dsid = {"name": "ctth_alti_pal"} info = dict(name="ctth_alti_pal", file_type="nc_nwcsaf_ctth", @@ -420,7 +420,7 @@ def test_get_palette_fill_value_color_added(self, nwcsaf_pps_ctth_filehandler): def test_get_dataset_raises_when_dataset_missing(self, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() raises an error when the requested dataset is missing.""" - dsid = {'name': 'cpp_phase'} + dsid = {"name": "cpp_phase"} info = dict(name="cpp_phase", file_type="nc_nwcsaf_cpp") with pytest.raises(KeyError): @@ -428,8 +428,8 @@ def test_get_dataset_raises_when_dataset_missing(self, nwcsaf_pps_cpp_filehandle def test_get_dataset_uses_file_key_if_present(self, nwcsaf_pps_cmic_filehandler, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() uses a file_key if present.""" - dsid_cpp = {'name': 'cpp_cot'} - dsid_cmic = {'name': 'cmic_cot'} + dsid_cpp = {"name": "cpp_cot"} + dsid_cmic = {"name": "cmic_cot"} file_key = "cmic_cot" @@ -449,17 +449,17 @@ def test_get_dataset_uses_file_key_if_present(self, nwcsaf_pps_cmic_filehandler, def test_get_dataset_can_handle_file_key_list(self, nwcsaf_pps_cmic_filehandler, nwcsaf_pps_cpp_filehandler): """Test that get_dataset() can handle a list of file_keys.""" - dsid_cpp = {'name': 'cpp_reff'} - dsid_cmic = {'name': 'cmic_cre'} + dsid_cpp = {"name": "cpp_reff"} + dsid_cmic = {"name": "cmic_cre"} info_cpp = dict(name="cmic_reff", - file_key=['reff', 'cre'], + file_key=["reff", "cre"], file_type="nc_nwcsaf_cpp") res_cpp = nwcsaf_pps_cpp_filehandler.get_dataset(dsid_cpp, info_cpp) info_cmic = dict(name="cmic_reff", - file_key=['reff', 'cre'], + file_key=["reff", "cre"], file_type="nc_nwcsaf_cpp") res_cmic = nwcsaf_pps_cmic_filehandler.get_dataset(dsid_cmic, info_cmic) @@ -471,8 +471,8 @@ class TestNcNWCSAFFileKeyPrefix: def test_get_dataset_uses_file_key_prefix(self, nwcsaf_pps_cmic_filehandler): """Test that get_dataset() uses a file_key_prefix.""" - dsid_cpp = {'name': 'cpp_cot'} - dsid_cmic = {'name': 'cmic_cot'} + dsid_cpp = {"name": "cpp_cot"} + dsid_cmic = {"name": "cmic_cot"} file_key = "cot" @@ -490,7 +490,7 @@ def test_get_dataset_uses_file_key_prefix(self, nwcsaf_pps_cmic_filehandler): def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(self, nwcsaf_pps_cmic_filehandler): """Test that get_dataset() returns scaled palette_meanings using another dataset as scaling source.""" - dsid = {'name': 'cpp_cot_pal'} + 
dsid = {"name": "cpp_cot_pal"} info = dict(name="cpp_cot_pal", file_key="cot_pal", @@ -503,11 +503,11 @@ def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(sel def _check_area_def(area_definition): - correct_h = float(PROJ['gdal_projection'].split('+h=')[-1]) - correct_a = float(PROJ['gdal_projection'].split('+a=')[-1].split()[0]) - assert area_definition.proj_dict['h'] == correct_h - assert area_definition.proj_dict['a'] == correct_a - assert area_definition.proj_dict['units'] == 'm' + correct_h = float(PROJ["gdal_projection"].split("+h=")[-1]) + correct_a = float(PROJ["gdal_projection"].split("+a=")[-1].split()[0]) + assert area_definition.proj_dict["h"] == correct_h + assert area_definition.proj_dict["a"] == correct_a + assert area_definition.proj_dict["units"] == "m" correct_extent = (PROJ["gdal_xgeo_up_left"], PROJ["gdal_ygeo_low_right"], PROJ["gdal_xgeo_low_right"], diff --git a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py index 8575c337cb..bdb0edfb03 100644 --- a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py +++ b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py @@ -49,12 +49,12 @@ def fake_dataset(): nobs = xr.DataArray( [[5, 118, 5, 100], [0, 15, 0, 1]], dims=("lat", "lon"), - attrs={'_FillValue': 0} + attrs={"_FillValue": 0} ) nobs_filt = xr.DataArray( [[5, 118, 5, 100], [np.nan, 15, np.nan, 1]], dims=("lat", "lon"), - attrs={'_FillValue': 0} + attrs={"_FillValue": 0} ) watcls = xr.DataArray( [[12.2, 0.01, 6.754, 5.33], [12.5, 101.5, 103.5, 204.]], @@ -83,50 +83,50 @@ def fake_dataset(): ) -ds_dict = {'adg_490': 'adg_490', - 'water_class10': 'water_class10', - 'seawifs_nobs_sum': 'test_nobs', - 'kd_490': 'kd_490', - 'atot_665': 'atot_665'} +ds_dict = {"adg_490": "adg_490", + "water_class10": "water_class10", + "seawifs_nobs_sum": "test_nobs", + "kd_490": "kd_490", + "atot_665": "atot_665"} -ds_list_all = ['adg_490', 'water_class10', 'seawifs_nobs_sum', 'kd_490', 'atot_665'] -ds_list_iop = ['adg_490', 'water_class10', 'seawifs_nobs_sum', 'atot_665'] -ds_list_kd = ['kd_490', 'water_class10', 'seawifs_nobs_sum'] +ds_list_all = ["adg_490", "water_class10", "seawifs_nobs_sum", "kd_490", "atot_665"] +ds_list_iop = ["adg_490", "water_class10", "seawifs_nobs_sum", "atot_665"] +ds_list_kd = ["kd_490", "water_class10", "seawifs_nobs_sum"] -@pytest.fixture +@pytest.fixture() def fake_file_dict(fake_dataset, tmp_path): """Write a fake dataset to file.""" fdict = {} filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-10M_MONTHLY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['bad_month'] = filename + fdict["bad_month"] = filename filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-2D_DAILY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['bad_day'] = filename + fdict["bad_day"] = filename filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-1M_MONTHLY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['ocprod_1m'] = filename + fdict["ocprod_1m"] = filename filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-5D_DAILY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['ocprod_5d'] = filename + fdict["ocprod_5d"] = filename filename = tmp_path / "ESACCI-OC-L3S-IOP-MERGED-8D_DAILY_4km_GEO_PML_RRS-20211117-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['iop_8d'] = filename + fdict["iop_8d"] = filename filename = tmp_path / 
"ESACCI-OC-L3S-IOP-MERGED-1D_DAILY_4km_GEO_PML_OCx-202112-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['iop_1d'] = filename + fdict["iop_1d"] = filename filename = tmp_path / "ESACCI-OC-L3S-K_490-MERGED-1D_DAILY_4km_GEO_PML_RRS-20210113-fv5.0.nc" fake_dataset.to_netcdf(filename) - fdict['k490_1d'] = filename + fdict["k490_1d"] = filename - yield fdict + return fdict class TestOCCCIReader: @@ -137,7 +137,7 @@ def setup_method(self): from satpy._config import config_search_paths self.yaml_file = "oceancolorcci_l3_nc.yaml" - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) def _create_reader_for_resolutions(self, filename): from satpy.readers import load_reader @@ -149,10 +149,10 @@ def _create_reader_for_resolutions(self, filename): assert reader.file_handlers return reader - @pytest.fixture + @pytest.fixture() def area_exp(self): """Get expected area definition.""" - proj_dict = {'datum': 'WGS84', 'no_defs': 'None', 'proj': 'longlat', 'type': 'crs'} + proj_dict = {"datum": "WGS84", "no_defs": "None", "proj": "longlat", "type": "crs"} return AreaDefinition( area_id="gridded_occci", @@ -166,9 +166,9 @@ def area_exp(self): def test_get_area_def(self, area_exp, fake_file_dict): """Test area definition.""" - reader = self._create_reader_for_resolutions([fake_file_dict['ocprod_1m']]) + reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_1m"]]) res = reader.load([ds_list_all[0]]) - area = res[ds_list_all[0]].attrs['area'] + area = res[ds_list_all[0]].attrs["area"] assert area.area_id == area_exp.area_id assert area.area_extent == area_exp.area_extent @@ -178,75 +178,75 @@ def test_get_area_def(self, area_exp, fake_file_dict): def test_bad_fname(self, fake_dataset, fake_file_dict): """Test case where an incorrect composite period is given.""" - reader = self._create_reader_for_resolutions([fake_file_dict['bad_month']]) + reader = self._create_reader_for_resolutions([fake_file_dict["bad_month"]]) res = reader.load([ds_list_all[0]]) assert len(res) == 0 - reader = self._create_reader_for_resolutions([fake_file_dict['bad_day']]) + reader = self._create_reader_for_resolutions([fake_file_dict["bad_day"]]) res = reader.load([ds_list_all[0]]) assert len(res) == 0 def test_get_dataset_monthly_allprods(self, fake_dataset, fake_file_dict): """Test dataset loading.""" - reader = self._create_reader_for_resolutions([fake_file_dict['ocprod_1m']]) + reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_1m"]]) # Check how many datasets are available. This file contains all of them. assert len(list(reader.available_dataset_names)) == 94 res = reader.load(ds_list_all) assert len(res) == len(ds_list_all) for curds in ds_list_all: np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values) - assert res[curds].attrs['sensor'] == 'merged' - assert res[curds].attrs['composite_period'] == 'monthly' + assert res[curds].attrs["sensor"] == "merged" + assert res[curds].attrs["composite_period"] == "monthly" def test_get_dataset_8d_iopprods(self, fake_dataset, fake_file_dict): """Test dataset loading.""" - reader = self._create_reader_for_resolutions([fake_file_dict['iop_8d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["iop_8d"]]) # Check how many datasets are available. This file contains all of them. 
assert len(list(reader.available_dataset_names)) == 70 res = reader.load(ds_list_iop) assert len(res) == len(ds_list_iop) for curds in ds_list_iop: np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values) - assert res[curds].attrs['sensor'] == 'merged' - assert res[curds].attrs['composite_period'] == '8-day' + assert res[curds].attrs["sensor"] == "merged" + assert res[curds].attrs["composite_period"] == "8-day" def test_get_dataset_1d_kprods(self, fake_dataset, fake_file_dict): """Test dataset loading.""" - reader = self._create_reader_for_resolutions([fake_file_dict['k490_1d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["k490_1d"]]) # Check how many datasets are available. This file contains all of them. assert len(list(reader.available_dataset_names)) == 25 res = reader.load(ds_list_kd) assert len(res) == len(ds_list_kd) for curds in ds_list_kd: np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values) - assert res[curds].attrs['sensor'] == 'merged' - assert res[curds].attrs['composite_period'] == 'daily' + assert res[curds].attrs["sensor"] == "merged" + assert res[curds].attrs["composite_period"] == "daily" def test_get_dataset_5d_allprods(self, fake_dataset, fake_file_dict): """Test dataset loading.""" - reader = self._create_reader_for_resolutions([fake_file_dict['ocprod_5d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_5d"]]) # Check how many datasets are available. This file contains all of them. assert len(list(reader.available_dataset_names)) == 94 res = reader.load(ds_list_all) assert len(res) == len(ds_list_all) for curds in ds_list_all: np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values) - assert res[curds].attrs['sensor'] == 'merged' - assert res[curds].attrs['composite_period'] == '5-day' + assert res[curds].attrs["sensor"] == "merged" + assert res[curds].attrs["composite_period"] == "5-day" def test_start_time(self, fake_file_dict): """Test start time property.""" - reader = self._create_reader_for_resolutions([fake_file_dict['k490_1d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["k490_1d"]]) assert reader.start_time == datetime(2021, 8, 1, 0, 0, 0) def test_end_time(self, fake_file_dict): """Test end time property.""" - reader = self._create_reader_for_resolutions([fake_file_dict['iop_8d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["iop_8d"]]) assert reader.end_time == datetime(2021, 8, 31, 23, 59, 0) def test_correct_dimnames(self, fake_file_dict): """Check that the loaded dimension names are correct.""" - reader = self._create_reader_for_resolutions([fake_file_dict['ocprod_5d']]) + reader = self._create_reader_for_resolutions([fake_file_dict["ocprod_5d"]]) res = reader.load(ds_list_all) for dsname in ds_list_all: - assert res[dsname].dims[0] == 'y' - assert res[dsname].dims[1] == 'x' + assert res[dsname].dims[0] == "y" + assert res[dsname].dims[1] == "x" diff --git a/satpy/tests/reader_tests/test_olci_nc.py b/satpy/tests/reader_tests/test_olci_nc.py index 6761511cf5..2f37fb2098 100644 --- a/satpy/tests/reader_tests/test_olci_nc.py +++ b/satpy/tests/reader_tests/test_olci_nc.py @@ -24,7 +24,7 @@ class TestOLCIReader(unittest.TestCase): """Test various olci_nc filehandlers.""" - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" import xarray as xr @@ -34,66 +34,66 @@ def test_instantiate(self, 
mocked_dataset): cal_data = xr.Dataset( { - 'solar_flux': (('bands'), [0, 1, 2]), - 'detector_index': (('bands'), [0, 1, 2]), + "solar_flux": (("bands"), [0, 1, 2]), + "detector_index": (("bands"), [0, 1, 2]), }, - {'bands': [0, 1, 2], }, + {"bands": [0, 1, 2], }, ) - ds_id = make_dataid(name='Oa01', calibration='reflectance') - ds_id2 = make_dataid(name='wsqf', calibration='reflectance') - filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0} + ds_id = make_dataid(name="Oa01", calibration="reflectance") + ds_id2 = make_dataid(name="wsqf", calibration="reflectance") + filename_info = {"mission_id": "S3A", "dataset_name": "Oa01", "start_time": 0, "end_time": 0} - test = NCOLCIBase('somedir/somefile.nc', filename_info, 'c') + test = NCOLCIBase("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCOLCICal('somedir/somefile.nc', filename_info, 'c') + test = NCOLCICal("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCOLCIGeo('somedir/somefile.nc', filename_info, 'c') + test = NCOLCIGeo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCOLCIChannelBase('somedir/somefile.nc', filename_info, 'c') + test = NCOLCIChannelBase("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() cal = mock.Mock() cal.nc = cal_data - test = NCOLCI1B('somedir/somefile.nc', filename_info, 'c', cal) + test = NCOLCI1B("somedir/somefile.nc", filename_info, "c", cal) test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCOLCI2('somedir/somefile.nc', filename_info, 'c') - test.get_dataset(ds_id, {'nc_key': 'the_key'}) - test.get_dataset(ds_id2, {'nc_key': 'the_key'}) + test = NCOLCI2("somedir/somefile.nc", filename_info, "c") + test.get_dataset(ds_id, {"nc_key": "the_key"}) + test.get_dataset(ds_id2, {"nc_key": "the_key"}) mocked_dataset.assert_called() mocked_dataset.reset_mock() - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_open_file_objects(self, mocked_open_dataset): """Test initialization of file handlers.""" from satpy.readers.olci_nc import NCOLCIBase - filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "S3A", "dataset_name": "Oa01", "start_time": 0, "end_time": 0} open_file = mock.MagicMock() - file_handler = NCOLCIBase(open_file, filename_info, 'c') + file_handler = NCOLCIBase(open_file, filename_info, "c") # deepcode ignore W0104: This is a property that is actually a function call. 
file_handler.nc # pylint: disable=W0104 mocked_open_dataset.assert_called() open_file.open.assert_called() assert (open_file.open.return_value in mocked_open_dataset.call_args[0] or - open_file.open.return_value == mocked_open_dataset.call_args[1].get('filename_or_obj')) + open_file.open.return_value == mocked_open_dataset.call_args[1].get("filename_or_obj")) - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_get_mask(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -101,15 +101,15 @@ def test_get_mask(self, mocked_dataset): from satpy.readers.olci_nc import NCOLCI2 from satpy.tests.utils import make_dataid - mocked_dataset.return_value = xr.Dataset({'mask': (['rows', 'columns'], + mocked_dataset.return_value = xr.Dataset({"mask": (["rows", "columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}) - ds_id = make_dataid(name='mask') - filename_info = {'mission_id': 'S3A', 'dataset_name': 'mask', 'start_time': 0, 'end_time': 0} - test = NCOLCI2('somedir/somefile.nc', filename_info, 'c') - res = test.get_dataset(ds_id, {'nc_key': 'mask'}) - self.assertEqual(res.dtype, np.dtype('bool')) + coords={"rows": np.arange(5), + "columns": np.arange(6)}) + ds_id = make_dataid(name="mask") + filename_info = {"mission_id": "S3A", "dataset_name": "mask", "start_time": 0, "end_time": 0} + test = NCOLCI2("somedir/somefile.nc", filename_info, "c") + res = test.get_dataset(ds_id, {"nc_key": "mask"}) + assert res.dtype == np.dtype("bool") expected = np.array([[True, False, True, True, True, True], [False, False, True, True, False, False], [False, False, False, False, False, True], @@ -117,7 +117,7 @@ def test_get_mask(self, mocked_dataset): [True, False, False, True, False, False]]) np.testing.assert_array_equal(res.values, expected) - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_get_mask_with_alternative_items(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -125,19 +125,19 @@ def test_get_mask_with_alternative_items(self, mocked_dataset): from satpy.readers.olci_nc import NCOLCI2 from satpy.tests.utils import make_dataid - mocked_dataset.return_value = xr.Dataset({'mask': (['rows', 'columns'], + mocked_dataset.return_value = xr.Dataset({"mask": (["rows", "columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}) - ds_id = make_dataid(name='mask') - filename_info = {'mission_id': 'S3A', 'dataset_name': 'mask', 'start_time': 0, 'end_time': 0} - test = NCOLCI2('somedir/somefile.nc', filename_info, 'c', mask_items=["INVALID"]) - res = test.get_dataset(ds_id, {'nc_key': 'mask'}) - self.assertEqual(res.dtype, np.dtype('bool')) + coords={"rows": np.arange(5), + "columns": np.arange(6)}) + ds_id = make_dataid(name="mask") + filename_info = {"mission_id": "S3A", "dataset_name": "mask", "start_time": 0, "end_time": 0} + test = NCOLCI2("somedir/somefile.nc", filename_info, "c", mask_items=["INVALID"]) + res = test.get_dataset(ds_id, {"nc_key": "mask"}) + assert res.dtype == np.dtype("bool") expected = np.array([True] + [False] * 29).reshape(5, 6) np.testing.assert_array_equal(res.values, expected) - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_olci_angles(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -146,31 +146,31 @@ def test_olci_angles(self, mocked_dataset): from satpy.readers.olci_nc import 
NCOLCIAngles from satpy.tests.utils import make_dataid attr_dict = { - 'ac_subsampling_factor': 1, - 'al_subsampling_factor': 2, + "ac_subsampling_factor": 1, + "al_subsampling_factor": 2, } - mocked_dataset.return_value = xr.Dataset({'SAA': (['tie_rows', 'tie_columns'], + mocked_dataset.return_value = xr.Dataset({"SAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'SZA': (['tie_rows', 'tie_columns'], + "SZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'OAA': (['tie_rows', 'tie_columns'], + "OAA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'OZA': (['tie_rows', 'tie_columns'], + "OZA": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}, + coords={"rows": np.arange(5), + "columns": np.arange(6)}, attrs=attr_dict) - filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "S3A", "dataset_name": "Oa01", "start_time": 0, "end_time": 0} - ds_id = make_dataid(name='solar_azimuth_angle') - ds_id2 = make_dataid(name='satellite_zenith_angle') - test = NCOLCIAngles('somedir/somefile.nc', filename_info, 'c') + ds_id = make_dataid(name="solar_azimuth_angle") + ds_id2 = make_dataid(name="satellite_zenith_angle") + test = NCOLCIAngles("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() mocked_dataset.reset_mock() - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_olci_meteo(self, mocked_dataset): """Test reading datasets.""" import numpy as np @@ -179,26 +179,26 @@ def test_olci_meteo(self, mocked_dataset): from satpy.readers.olci_nc import NCOLCIMeteo from satpy.tests.utils import make_dataid attr_dict = { - 'ac_subsampling_factor': 1, - 'al_subsampling_factor': 2, + "ac_subsampling_factor": 1, + "al_subsampling_factor": 2, } - data = {'humidity': (['tie_rows', 'tie_columns'], + data = {"humidity": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'total_ozone': (['tie_rows', 'tie_columns'], + "total_ozone": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'sea_level_pressure': (['tie_rows', 'tie_columns'], + "sea_level_pressure": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6)), - 'total_columnar_water_vapour': (['tie_rows', 'tie_columns'], + "total_columnar_water_vapour": (["tie_rows", "tie_columns"], np.array([1 << x for x in range(30)]).reshape(5, 6))} mocked_dataset.return_value = xr.Dataset(data, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}, + coords={"rows": np.arange(5), + "columns": np.arange(6)}, attrs=attr_dict) - filename_info = {'mission_id': 'S3A', 'dataset_name': 'humidity', 'start_time': 0, 'end_time': 0} + filename_info = {"mission_id": "S3A", "dataset_name": "humidity", "start_time": 0, "end_time": 0} - ds_id = make_dataid(name='humidity') - ds_id2 = make_dataid(name='total_ozone') - test = NCOLCIMeteo('somedir/somefile.nc', filename_info, 'c') + ds_id = make_dataid(name="humidity") + ds_id2 = make_dataid(name="total_ozone") + test = NCOLCIMeteo("somedir/somefile.nc", filename_info, "c") test.get_dataset(ds_id, filename_info) test.get_dataset(ds_id2, filename_info) mocked_dataset.assert_called() @@ -213,28 +213,28 @@ def test_chl_nn(self, 
mocked_dataset): from satpy.readers.olci_nc import NCOLCI2 from satpy.tests.utils import make_dataid attr_dict = { - 'ac_subsampling_factor': 64, - 'al_subsampling_factor': 1, + "ac_subsampling_factor": 64, + "al_subsampling_factor": 1, } - data = {'CHL_NN': (['rows', 'columns'], + data = {"CHL_NN": (["rows", "columns"], np.arange(30).reshape(5, 6).astype(float), {"units": "lg(re mg.m-3)"})} mocked_dataset.return_value = xr.Dataset(data, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}, + coords={"rows": np.arange(5), + "columns": np.arange(6)}, attrs=attr_dict) - ds_info = {'name': 'chl_nn', 'sensor': 'olci', 'resolution': 300, - 'standard_name': 'algal_pigment_concentration', 'units': 'lg(re mg.m-3)', - 'coordinates': ('longitude', 'latitude'), 'file_type': 'esa_l2_chl_nn', 'nc_key': 'CHL_NN', - 'modifiers': ()} - filename_info = {'mission_id': 'S3A', 'datatype_id': 'WFR', - 'start_time': datetime.datetime(2019, 9, 24, 9, 29, 39), - 'end_time': datetime.datetime(2019, 9, 24, 9, 32, 39), - 'creation_time': datetime.datetime(2019, 9, 24, 11, 40, 26), 'duration': 179, 'cycle': 49, - 'relative_orbit': 307, 'frame': 1800, 'centre': 'MAR', 'mode': 'O', 'timeliness': 'NR', - 'collection': '002'} - ds_id = make_dataid(name='chl_nn') - file_handler = NCOLCI2('somedir/somefile.nc', filename_info, None, unlog=True) + ds_info = {"name": "chl_nn", "sensor": "olci", "resolution": 300, + "standard_name": "algal_pigment_concentration", "units": "lg(re mg.m-3)", + "coordinates": ("longitude", "latitude"), "file_type": "esa_l2_chl_nn", "nc_key": "CHL_NN", + "modifiers": ()} + filename_info = {"mission_id": "S3A", "datatype_id": "WFR", + "start_time": datetime.datetime(2019, 9, 24, 9, 29, 39), + "end_time": datetime.datetime(2019, 9, 24, 9, 32, 39), + "creation_time": datetime.datetime(2019, 9, 24, 11, 40, 26), "duration": 179, "cycle": 49, + "relative_orbit": 307, "frame": 1800, "centre": "MAR", "mode": "O", "timeliness": "NR", + "collection": "002"} + ds_id = make_dataid(name="chl_nn") + file_handler = NCOLCI2("somedir/somefile.nc", filename_info, None, unlog=True) res = file_handler.get_dataset(ds_id, ds_info) assert res.attrs["units"] == "mg.m-3" @@ -251,13 +251,13 @@ def test_bitflags(self): import numpy as np from satpy.readers.olci_nc import BitFlags - flag_list = ['INVALID', 'WATER', 'LAND', 'CLOUD', 'SNOW_ICE', - 'INLAND_WATER', 'TIDAL', 'COSMETIC', 'SUSPECT', 'HISOLZEN', - 'SATURATED', 'MEGLINT', 'HIGHGLINT', 'WHITECAPS', - 'ADJAC', 'WV_FAIL', 'PAR_FAIL', 'AC_FAIL', 'OC4ME_FAIL', - 'OCNN_FAIL', 'Extra_1', 'KDM_FAIL', 'Extra_2', - 'CLOUD_AMBIGUOUS', 'CLOUD_MARGIN', 'BPAC_ON', - 'WHITE_SCATT', 'LOWRW', 'HIGHRW'] + flag_list = ["INVALID", "WATER", "LAND", "CLOUD", "SNOW_ICE", + "INLAND_WATER", "TIDAL", "COSMETIC", "SUSPECT", "HISOLZEN", + "SATURATED", "MEGLINT", "HIGHGLINT", "WHITECAPS", + "ADJAC", "WV_FAIL", "PAR_FAIL", "AC_FAIL", "OC4ME_FAIL", + "OCNN_FAIL", "Extra_1", "KDM_FAIL", "Extra_2", + "CLOUD_AMBIGUOUS", "CLOUD_MARGIN", "BPAC_ON", + "WHITE_SCATT", "LOWRW", "HIGHRW"] bits = np.array([1 << x for x in range(len(flag_list))]) @@ -273,4 +273,4 @@ def test_bitflags(self): False, False, False, True, False, True, False, False, False, True, True, False, False, True, False]) - self.assertTrue(all(mask == expected)) + assert all(mask == expected) diff --git a/satpy/tests/reader_tests/test_omps_edr.py b/satpy/tests/reader_tests/test_omps_edr.py index 2c211013cc..9aa227a200 100644 --- a/satpy/tests/reader_tests/test_omps_edr.py +++ b/satpy/tests/reader_tests/test_omps_edr.py @@ -44,120 
+44,120 @@ def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" file_content = {} attrs = [] - if 'SO2NRT' in filename: - k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Data Fields/ColumnAmountSO2_TRM' + if "SO2NRT" in filename: + k = "HDFEOS/SWATHS/OMPS Column Amount SO2/Data Fields/ColumnAmountSO2_TRM" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/ScaleFactor'] = 1.1 - file_content[k + '/attr/Offset'] = 0.1 - file_content[k + '/attr/MissingValue'] = -1 - file_content[k + '/attr/Title'] = 'Vertical Column Amount SO2 (TRM)' - file_content[k + '/attr/Units'] = 'D.U.' - file_content[k + '/attr/ValidRange'] = (-10, 2000) - k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Longitude' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/ScaleFactor"] = 1.1 + file_content[k + "/attr/Offset"] = 0.1 + file_content[k + "/attr/MissingValue"] = -1 + file_content[k + "/attr/Title"] = "Vertical Column Amount SO2 (TRM)" + file_content[k + "/attr/Units"] = "D.U." + file_content[k + "/attr/ValidRange"] = (-10, 2000) + k = "HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Longitude" file_content[k] = DEFAULT_LON_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/ScaleFactor'] = 1.1 - file_content[k + '/attr/Offset'] = 0.1 - file_content[k + '/attr/Units'] = 'deg' - file_content[k + '/attr/MissingValue'] = -1 - file_content[k + '/attr/Title'] = 'Geodetic Longitude' - file_content[k + '/attr/ValidRange'] = (-180, 180) - k = 'HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Latitude' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/ScaleFactor"] = 1.1 + file_content[k + "/attr/Offset"] = 0.1 + file_content[k + "/attr/Units"] = "deg" + file_content[k + "/attr/MissingValue"] = -1 + file_content[k + "/attr/Title"] = "Geodetic Longitude" + file_content[k + "/attr/ValidRange"] = (-180, 180) + k = "HDFEOS/SWATHS/OMPS Column Amount SO2/Geolocation Fields/Latitude" file_content[k] = DEFAULT_LAT_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/ScaleFactor'] = 1.1 - file_content[k + '/attr/Offset'] = 0.1 - file_content[k + '/attr/Units'] = 'deg' - file_content[k + '/attr/MissingValue'] = -1 - file_content[k + '/attr/Title'] = 'Geodetic Latitude' - file_content[k + '/attr/ValidRange'] = (-90, 90) - elif 'NMSO2' in filename: - file_content['GEOLOCATION_DATA/Longitude'] = DEFAULT_LON_DATA - file_content['GEOLOCATION_DATA/Longitude/shape'] = DEFAULT_FILE_SHAPE - file_content['GEOLOCATION_DATA/Longitude/attr/valid_max'] = 180 - file_content['GEOLOCATION_DATA/Longitude/attr/valid_min'] = -180 - file_content['GEOLOCATION_DATA/Longitude/attr/_FillValue'] = -1.26765e+30 - file_content['GEOLOCATION_DATA/Longitude/attr/long_name'] = 'Longitude' - file_content['GEOLOCATION_DATA/Longitude/attr/standard_name'] = 'longitude' - file_content['GEOLOCATION_DATA/Longitude/attr/units'] = 'degrees_east' - file_content['GEOLOCATION_DATA/Latitude'] = DEFAULT_LAT_DATA - file_content['GEOLOCATION_DATA/Latitude/shape'] = DEFAULT_FILE_SHAPE - file_content['GEOLOCATION_DATA/Latitude/attr/valid_max'] = 90 - file_content['GEOLOCATION_DATA/Latitude/attr/valid_min'] = -90 - file_content['GEOLOCATION_DATA/Latitude/attr/_FillValue'] = -1.26765e+30 - file_content['GEOLOCATION_DATA/Latitude/attr/long_name'] = 'Latitude' - file_content['GEOLOCATION_DATA/Latitude/attr/standard_name'] = 'latitude' - 
file_content['GEOLOCATION_DATA/Latitude/attr/units'] = 'degress_north' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/ScaleFactor"] = 1.1 + file_content[k + "/attr/Offset"] = 0.1 + file_content[k + "/attr/Units"] = "deg" + file_content[k + "/attr/MissingValue"] = -1 + file_content[k + "/attr/Title"] = "Geodetic Latitude" + file_content[k + "/attr/ValidRange"] = (-90, 90) + elif "NMSO2" in filename: + file_content["GEOLOCATION_DATA/Longitude"] = DEFAULT_LON_DATA + file_content["GEOLOCATION_DATA/Longitude/shape"] = DEFAULT_FILE_SHAPE + file_content["GEOLOCATION_DATA/Longitude/attr/valid_max"] = 180 + file_content["GEOLOCATION_DATA/Longitude/attr/valid_min"] = -180 + file_content["GEOLOCATION_DATA/Longitude/attr/_FillValue"] = -1.26765e+30 + file_content["GEOLOCATION_DATA/Longitude/attr/long_name"] = "Longitude" + file_content["GEOLOCATION_DATA/Longitude/attr/standard_name"] = "longitude" + file_content["GEOLOCATION_DATA/Longitude/attr/units"] = "degrees_east" + file_content["GEOLOCATION_DATA/Latitude"] = DEFAULT_LAT_DATA + file_content["GEOLOCATION_DATA/Latitude/shape"] = DEFAULT_FILE_SHAPE + file_content["GEOLOCATION_DATA/Latitude/attr/valid_max"] = 90 + file_content["GEOLOCATION_DATA/Latitude/attr/valid_min"] = -90 + file_content["GEOLOCATION_DATA/Latitude/attr/_FillValue"] = -1.26765e+30 + file_content["GEOLOCATION_DATA/Latitude/attr/long_name"] = "Latitude" + file_content["GEOLOCATION_DATA/Latitude/attr/standard_name"] = "latitude" + file_content["GEOLOCATION_DATA/Latitude/attr/units"] = "degress_north" - k = 'SCIENCE_DATA/ColumnAmountSO2_TRM' + k = "SCIENCE_DATA/ColumnAmountSO2_TRM" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/_FillValue'] = -1.26765e+30 - file_content[k + '/attr/long_name'] = 'Column Amount SO2 (TRM)' - file_content[k + '/attr/units'] = 'DU' - file_content[k + '/attr/valid_max'] = 2000 - file_content[k + '/attr/valid_min'] = -10 + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/_FillValue"] = -1.26765e+30 + file_content[k + "/attr/long_name"] = "Column Amount SO2 (TRM)" + file_content[k + "/attr/units"] = "DU" + file_content[k + "/attr/valid_max"] = 2000 + file_content[k + "/attr/valid_min"] = -10 - k = 'SCIENCE_DATA/ColumnAmountSO2_STL' + k = "SCIENCE_DATA/ColumnAmountSO2_STL" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/_FillValue'] = -1.26765e+30 - file_content[k + '/attr/long_name'] = 'Column Amount SO2 (STL)' - file_content[k + '/attr/units'] = 'DU' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/_FillValue"] = -1.26765e+30 + file_content[k + "/attr/long_name"] = "Column Amount SO2 (STL)" + file_content[k + "/attr/units"] = "DU" - k = 'SCIENCE_DATA/ColumnAmountSO2_TRL' + k = "SCIENCE_DATA/ColumnAmountSO2_TRL" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/_FillValue'] = -1.26765e+30 - file_content[k + '/attr/long_name'] = 'Column Amount SO2 (TRL)' - file_content[k + '/attr/units'] = 'DU' - file_content[k + '/attr/valid_max'] = 2000 - file_content[k + '/attr/valid_min'] = -10 - file_content[k + '/attr/DIMENSION_LIST'] = [10, 10] - attrs = ['_FillValue', 'long_name', 'units', 'valid_max', 'valid_min', 'DIMENSION_LIST'] + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/_FillValue"] = -1.26765e+30 + file_content[k + "/attr/long_name"] = "Column Amount SO2 (TRL)" + 
file_content[k + "/attr/units"] = "DU" + file_content[k + "/attr/valid_max"] = 2000 + file_content[k + "/attr/valid_min"] = -10 + file_content[k + "/attr/DIMENSION_LIST"] = [10, 10] + attrs = ["_FillValue", "long_name", "units", "valid_max", "valid_min", "DIMENSION_LIST"] - k = 'SCIENCE_DATA/ColumnAmountSO2_TRU' + k = "SCIENCE_DATA/ColumnAmountSO2_TRU" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/long_name'] = 'Column Amount SO2 (TRU)' - file_content[k + '/attr/units'] = 'DU' - file_content[k + '/attr/valid_max'] = 2000 - file_content[k + '/attr/valid_min'] = -10 + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/long_name"] = "Column Amount SO2 (TRU)" + file_content[k + "/attr/units"] = "DU" + file_content[k + "/attr/valid_max"] = 2000 + file_content[k + "/attr/valid_min"] = -10 # Dataset with out unit - k = 'SCIENCE_DATA/ColumnAmountSO2_PBL' + k = "SCIENCE_DATA/ColumnAmountSO2_PBL" file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/_FillValue'] = -1.26765e+30 - file_content[k + '/attr/long_name'] = 'Column Amount SO2 (PBL)' - file_content[k + '/attr/valid_max'] = 2000 - file_content[k + '/attr/valid_min'] = -10 + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/_FillValue"] = -1.26765e+30 + file_content[k + "/attr/long_name"] = "Column Amount SO2 (PBL)" + file_content[k + "/attr/valid_max"] = 2000 + file_content[k + "/attr/valid_min"] = -10 else: - for k in ['Reflectivity331', 'UVAerosolIndex']: - k = 'SCIENCE_DATA/' + k + for k in ["Reflectivity331", "UVAerosolIndex"]: + k = "SCIENCE_DATA/" + k file_content[k] = DEFAULT_FILE_DATA - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - file_content[k + '/attr/Units'] = 'Unitless' - if k == 'UVAerosolIndex': - file_content[k + '/attr/ValidRange'] = (-30, 30) - file_content[k + '/attr/Title'] = 'UV Aerosol Index' + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + file_content[k + "/attr/Units"] = "Unitless" + if k == "UVAerosolIndex": + file_content[k + "/attr/ValidRange"] = (-30, 30) + file_content[k + "/attr/Title"] = "UV Aerosol Index" else: - file_content[k + '/attr/ValidRange'] = (-0.15, 1.15) - file_content[k + '/attr/Title'] = 'Effective Surface Reflectivity at 331 nm' - file_content[k + '/attr/_FillValue'] = -1. - file_content['GEOLOCATION_DATA/Longitude'] = DEFAULT_LON_DATA - file_content['GEOLOCATION_DATA/Longitude/shape'] = DEFAULT_FILE_SHAPE - file_content['GEOLOCATION_DATA/Longitude/attr/ValidRange'] = (-180, 180) - file_content['GEOLOCATION_DATA/Longitude/attr/_FillValue'] = -999. - file_content['GEOLOCATION_DATA/Longitude/attr/Title'] = 'Geodetic Longitude' - file_content['GEOLOCATION_DATA/Longitude/attr/Units'] = 'deg' - file_content['GEOLOCATION_DATA/Latitude'] = DEFAULT_LAT_DATA - file_content['GEOLOCATION_DATA/Latitude/shape'] = DEFAULT_FILE_SHAPE - file_content['GEOLOCATION_DATA/Latitude/attr/ValidRange'] = (-90, 90) - file_content['GEOLOCATION_DATA/Latitude/attr/_FillValue'] = -999. - file_content['GEOLOCATION_DATA/Latitude/attr/Title'] = 'Geodetic Latitude' - file_content['GEOLOCATION_DATA/Latitude/attr/Units'] = 'deg' + file_content[k + "/attr/ValidRange"] = (-0.15, 1.15) + file_content[k + "/attr/Title"] = "Effective Surface Reflectivity at 331 nm" + file_content[k + "/attr/_FillValue"] = -1. 
+ file_content["GEOLOCATION_DATA/Longitude"] = DEFAULT_LON_DATA + file_content["GEOLOCATION_DATA/Longitude/shape"] = DEFAULT_FILE_SHAPE + file_content["GEOLOCATION_DATA/Longitude/attr/ValidRange"] = (-180, 180) + file_content["GEOLOCATION_DATA/Longitude/attr/_FillValue"] = -999. + file_content["GEOLOCATION_DATA/Longitude/attr/Title"] = "Geodetic Longitude" + file_content["GEOLOCATION_DATA/Longitude/attr/Units"] = "deg" + file_content["GEOLOCATION_DATA/Latitude"] = DEFAULT_LAT_DATA + file_content["GEOLOCATION_DATA/Latitude/shape"] = DEFAULT_FILE_SHAPE + file_content["GEOLOCATION_DATA/Latitude/attr/ValidRange"] = (-90, 90) + file_content["GEOLOCATION_DATA/Latitude/attr/_FillValue"] = -999. + file_content["GEOLOCATION_DATA/Latitude/attr/Title"] = "Geodetic Latitude" + file_content["GEOLOCATION_DATA/Latitude/attr/Units"] = "deg" convert_file_content_to_data_array(file_content, attrs) return file_content @@ -172,12 +172,12 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.omps_edr import EDREOSFileHandler, EDRFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(EDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(EDRFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True - self.p2 = mock.patch.object(EDREOSFileHandler, '__bases__', (EDRFileHandler,)) + self.p2 = mock.patch.object(EDREOSFileHandler, "__bases__", (EDRFileHandler,)) self.fake_handler2 = self.p2.start() self.p2.is_local = True @@ -191,72 +191,72 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5', - 'OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5', - 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', + "OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5", + "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", + "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) - self.assertEqual(len(loadables), 3) + assert len(loadables) == 3 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_basic_load_so2(self): """Test basic load of so2 datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5', - 'OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5', - 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', + "OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5", + "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", + "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) - self.assertEqual(len(loadables), 3) + assert len(loadables) == 3 r.create_filehandlers(loadables) - ds = r.load(['so2_trm']) - self.assertEqual(len(ds), 1) + ds = r.load(["so2_trm"]) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs['resolution'], 50000) 
- self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + assert d.attrs["resolution"] == 50000 + assert d.shape == DEFAULT_FILE_SHAPE + assert "area" in d.attrs + assert d.attrs["area"] is not None - ds = r.load(['tcso2_trm_sampo']) - self.assertEqual(len(ds), 1) + ds = r.load(["tcso2_trm_sampo"]) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs['resolution'], 50000) - self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) + assert d.attrs["resolution"] == 50000 + assert d.shape == DEFAULT_FILE_SHAPE - ds = r.load(['tcso2_stl_sampo']) - self.assertEqual(len(ds), 0) + ds = r.load(["tcso2_stl_sampo"]) + assert len(ds) == 0 # Dataset without _FillValue - ds = r.load(['tcso2_tru_sampo']) - self.assertEqual(len(ds), 1) + ds = r.load(["tcso2_tru_sampo"]) + assert len(ds) == 1 # Dataset without unit - ds = r.load(['tcso2_pbl_sampo']) - self.assertEqual(len(ds), 0) + ds = r.load(["tcso2_pbl_sampo"]) + assert len(ds) == 0 def test_basic_load_to3(self): """Test basic load of to3 datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5', - 'OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5', - 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', + "OMPS-NPP-TC_EDR_SO2NRT-2016m0607t192031-o00001-2016m0607t192947.he5", + "OMPS-NPP-TC_EDR_TO3-v1.0-2016m0607t192031-o00001-2016m0607t192947.h5", + "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) - self.assertEqual(len(loadables), 3) + assert len(loadables) == 3 r.create_filehandlers(loadables) - ds = r.load(['reflectivity_331', 'uvaerosol_index']) - self.assertEqual(len(ds), 2) + ds = r.load(["reflectivity_331", "uvaerosol_index"]) + assert len(ds) == 2 for d in ds.values(): - self.assertEqual(d.attrs['resolution'], 50000) - self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + assert d.attrs["resolution"] == 50000 + assert d.shape == DEFAULT_FILE_SHAPE + assert "area" in d.attrs + assert d.attrs["area"] is not None - @mock.patch('satpy.readers.hdf5_utils.HDF5FileHandler._get_reference') - @mock.patch('h5py.File') + @mock.patch("satpy.readers.hdf5_utils.HDF5FileHandler._get_reference") + @mock.patch("h5py.File") def test_load_so2_DIMENSION_LIST(self, mock_h5py_file, mock_hdf5_utils_get_reference): """Test load of so2 datasets with DIMENSION_LIST.""" from satpy.readers import load_reader @@ -264,9 +264,9 @@ def test_load_so2_DIMENSION_LIST(self, mock_h5py_file, mock_hdf5_utils_get_refer mock_hdf5_utils_get_reference.return_value = [[[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]]] r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5', + "OMPS-NPP_NMSO2-PCA-L2_v1.1_2018m1129t112824_o00001_2018m1129t114426.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['tcso2_trl_sampo']) - self.assertEqual(len(ds), 1) + ds = r.load(["tcso2_trl_sampo"]) + assert len(ds) == 1 diff --git a/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py b/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py index 937470724f..661900e650 100644 --- a/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py +++ b/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py @@ -28,48 +28,48 @@ class TestSAFENC(unittest.TestCase): """Test various SAFE SAR L2 
OCN file handlers.""" - @mock.patch('satpy.readers.safe_sar_l2_ocn.xr') + @mock.patch("satpy.readers.safe_sar_l2_ocn.xr") def setUp(self, xr_): """Set up the tests.""" from satpy.readers.safe_sar_l2_ocn import SAFENC - self.channels = ['owiWindSpeed', 'owiLon', 'owiLat', 'owiHs', 'owiNrcs', 'foo', - 'owiPolarisationName', 'owiCalConstObsi'] + self.channels = ["owiWindSpeed", "owiLon", "owiLat", "owiHs", "owiNrcs", "foo", + "owiPolarisationName", "owiCalConstObsi"] # Mock file access to return a fake dataset. self.dummy3d = np.zeros((2, 2, 1)) self.dummy2d = np.zeros((2, 2)) self.dummy1d = np.zeros((2)) self.band = 1 self.nc = xr.Dataset( - {'owiWindSpeed': xr.DataArray(self.dummy2d, dims=('owiAzSize', 'owiRaSize'), attrs={'_FillValue': np.nan}), - 'owiLon': xr.DataArray(data=self.dummy2d, dims=('owiAzSize', 'owiRaSize')), - 'owiLat': xr.DataArray(data=self.dummy2d, dims=('owiAzSize', 'owiRaSize')), - 'owiHs': xr.DataArray(data=self.dummy3d, dims=('owiAzSize', 'owiRaSize', 'oswPartition')), - 'owiNrcs': xr.DataArray(data=self.dummy3d, dims=('owiAzSize', 'owiRaSize', 'oswPolarization')), - 'foo': xr.DataArray(self.dummy2d, dims=('owiAzSize', 'owiRaSize')), - 'owiPolarisationName': xr.DataArray(self.dummy1d, dims=('owiPolarisation')), - 'owiCalConstObsi': xr.DataArray(self.dummy1d, dims=('owiIncSize')) + {"owiWindSpeed": xr.DataArray(self.dummy2d, dims=("owiAzSize", "owiRaSize"), attrs={"_FillValue": np.nan}), + "owiLon": xr.DataArray(data=self.dummy2d, dims=("owiAzSize", "owiRaSize")), + "owiLat": xr.DataArray(data=self.dummy2d, dims=("owiAzSize", "owiRaSize")), + "owiHs": xr.DataArray(data=self.dummy3d, dims=("owiAzSize", "owiRaSize", "oswPartition")), + "owiNrcs": xr.DataArray(data=self.dummy3d, dims=("owiAzSize", "owiRaSize", "oswPolarization")), + "foo": xr.DataArray(self.dummy2d, dims=("owiAzSize", "owiRaSize")), + "owiPolarisationName": xr.DataArray(self.dummy1d, dims=("owiPolarisation")), + "owiCalConstObsi": xr.DataArray(self.dummy1d, dims=("owiIncSize")) }, - attrs={'_FillValue': np.nan, - 'missionName': 'S1A'}) + attrs={"_FillValue": np.nan, + "missionName": "S1A"}) xr_.open_dataset.return_value = self.nc # Instantiate reader using the mocked open_dataset() method. Also, make # the reader believe all abstract methods have been implemented. - self.reader = SAFENC(filename='dummy', - filename_info={'start_time': 0, - 'end_time': 0, - 'fstart_time': 0, - 'fend_time': 0, - 'polarization': 'vv'}, + self.reader = SAFENC(filename="dummy", + filename_info={"start_time": 0, + "end_time": 0, + "fstart_time": 0, + "fend_time": 0, + "polarization": "vv"}, filetype_info={}) def test_init(self): """Test reader initialization.""" - self.assertEqual(self.reader.start_time, 0) - self.assertEqual(self.reader.end_time, 0) - self.assertEqual(self.reader.fstart_time, 0) - self.assertEqual(self.reader.fend_time, 0) + assert self.reader.start_time == 0 + assert self.reader.end_time == 0 + assert self.reader.fstart_time == 0 + assert self.reader.fend_time == 0 def test_get_dataset(self): """Test getting a dataset.""" @@ -77,6 +77,4 @@ def test_get_dataset(self): dt = self.reader.get_dataset( key=make_dataid(name=ch), info={}) # ... 
this only compares the valid (unmasked) elements - self.assertTrue(np.all(self.nc[ch] == dt.to_masked_array()), - msg='get_dataset() returns invalid data for ' - 'dataset {}'.format(ch)) + assert np.all(self.nc[ch] == dt.to_masked_array()), f"get_dataset() returns invalid data for dataset {ch}" diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index e796c11b77..4ac4d97cfe 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -33,54 +33,54 @@ class TestSAFEGRD(unittest.TestCase): """Test the SAFE GRD file handler.""" - @mock.patch('rasterio.open') + @mock.patch("rasterio.open") def setUp(self, mocked_rio_open): """Set up the test case.""" from satpy.readers.sar_c_safe import SAFEGRD - filename_info = {'mission_id': 'S1A', 'dataset_name': 'foo', 'start_time': 0, 'end_time': 0, - 'polarization': 'vv'} - filetype_info = 'bla' + filename_info = {"mission_id": "S1A", "dataset_name": "foo", "start_time": 0, "end_time": 0, + "polarization": "vv"} + filetype_info = "bla" self.noisefh = mock.MagicMock() - self.noisefh.get_noise_correction.return_value = xr.DataArray(np.zeros((2, 2)), dims=['y', 'x']) + self.noisefh.get_noise_correction.return_value = xr.DataArray(np.zeros((2, 2)), dims=["y", "x"]) self.calfh = mock.MagicMock() self.calfh.get_calibration_constant.return_value = 1 - self.calfh.get_calibration.return_value = xr.DataArray(np.ones((2, 2)), dims=['y', 'x']) + self.calfh.get_calibration.return_value = xr.DataArray(np.ones((2, 2)), dims=["y", "x"]) self.annotationfh = mock.MagicMock() - self.test_fh = SAFEGRD('S1A_IW_GRDH_1SDV_20190201T024655_20190201T024720_025730_02DC2A_AE07.SAFE/measurement/' - 's1a-iw-grd-vv-20190201t024655-20190201t024720-025730-02dc2a-001.tiff', + self.test_fh = SAFEGRD("S1A_IW_GRDH_1SDV_20190201T024655_20190201T024720_025730_02DC2A_AE07.SAFE/measurement/" + "s1a-iw-grd-vv-20190201t024655-20190201t024720-025730-02dc2a-001.tiff", filename_info, filetype_info, self.calfh, self.noisefh, self.annotationfh) self.mocked_rio_open = mocked_rio_open def test_instantiate(self): """Test initialization of file handlers.""" - assert self.test_fh._polarization == 'vv' + assert self.test_fh._polarization == "vv" assert self.test_fh.calibration == self.calfh assert self.test_fh.noise == self.noisefh self.mocked_rio_open.assert_called() - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_read_calibrated_natural(self, mocked_xarray_open): """Test the calibration routines.""" calibration = mock.MagicMock() calibration.name = "sigma_nought" mocked_xarray_open.return_value.__getitem__.return_value = xr.DataArray(da.from_array(np.array([[0, 1], [2, 3]])), - dims=['y', 'x']) + dims=["y", "x"]) xarr = self.test_fh.get_dataset(DataQuery(name="measurement", polarization="vv", - calibration=calibration, quantity='natural'), info=dict()) + calibration=calibration, quantity="natural"), info=dict()) np.testing.assert_allclose(xarr, [[np.nan, 2], [5, 10]]) - @mock.patch('xarray.open_dataset') + @mock.patch("xarray.open_dataset") def test_read_calibrated_dB(self, mocked_xarray_open): """Test the calibration routines.""" calibration = mock.MagicMock() calibration.name = "sigma_nought" mocked_xarray_open.return_value.__getitem__.return_value = xr.DataArray(da.from_array(np.array([[0, 1], [2, 3]])), - dims=['y', 'x']) + dims=["y", "x"]) xarr = self.test_fh.get_dataset(DataQuery(name="measurement", polarization="vv", - calibration=calibration, quantity='dB'), 
info=dict()) + calibration=calibration, quantity="dB"), info=dict()) np.testing.assert_allclose(xarr, [[np.nan, 3.0103], [6.9897, 10]]) def test_read_lon_lats(self): @@ -109,7 +109,7 @@ def __init__(self, *args): FakeGCP(15, 0, 0, 3, 0), ] - crs = dict(init='epsg:4326') + crs = dict(init="epsg:4326") self.mocked_rio_open.return_value.gcps = [gcps, crs] self.mocked_rio_open.return_value.shape = [16, 16] diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py index 605f595e1f..e71534fbd2 100644 --- a/satpy/tests/reader_tests/test_satpy_cf_nc.py +++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py @@ -42,53 +42,53 @@ def _create_test_netcdf(filename, resolution=742): lon = -13.0 * data_visir lat = xr.DataArray(lat, - dims=('y', 'x'), - attrs={'name': 'lat', - 'standard_name': 'latitude', - 'modifiers': np.array([])}) + dims=("y", "x"), + attrs={"name": "lat", + "standard_name": "latitude", + "modifiers": np.array([])}) lon = xr.DataArray(lon, - dims=('y', 'x'), - attrs={'name': 'lon', - 'standard_name': 'longitude', - 'modifiers': np.array([])}) + dims=("y", "x"), + attrs={"name": "lon", + "standard_name": "longitude", + "modifiers": np.array([])}) solar_zenith_angle_i = xr.DataArray(data_visir, - dims=('y', 'x'), - attrs={'name': 'solar_zenith_angle', - 'coordinates': 'lat lon', - 'resolution': resolution}) + dims=("y", "x"), + attrs={"name": "solar_zenith_angle", + "coordinates": "lat lon", + "resolution": resolution}) scene = Scene() - scene.attrs['sensor'] = ['viirs'] + scene.attrs["sensor"] = ["viirs"] scene_dict = { - 'lat': lat, - 'lon': lon, - 'solar_zenith_angle': solar_zenith_angle_i + "lat": lat, + "lon": lon, + "solar_zenith_angle": solar_zenith_angle_i } tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) common_attrs = { - 'start_time': tstart, - 'end_time': tend, - 'platform_name': 'NOAA 20', - 'orbit_number': 99999 + "start_time": tstart, + "end_time": tend, + "platform_name": "NOAA 20", + "orbit_number": 99999 } for key in scene_dict: scene[key] = scene_dict[key] - if key != 'swath_data': + if key != "swath_data": scene[key].attrs.update(common_attrs) - scene.save_datasets(writer='cf', + scene.save_datasets(writer="cf", filename=filename, - engine='h5netcdf', + engine="h5netcdf", flatten_attrs=True, pretty=True) return filename @pytest.fixture(scope="session") -def _cf_scene(): +def cf_scene(): tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) data_visir = np.array([[1, 2], [3, 4]]) @@ -100,15 +100,15 @@ def _cf_scene(): lon = -13.0 * np.array([[1, 2], [3, 4]]) proj_dict = { - 'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, - 'lon_0': 0.0, 'proj': 'geos', 'units': 'm' + "a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m" } x_size, y_size = data_visir.shape area_extent = (339045.5577, 4365586.6063, 1068143.527, 4803645.4685) area = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, x_size, y_size, @@ -120,131 +120,131 @@ def _cf_scene(): x_visir = x[0, :] common_attrs = { - 'start_time': tstart, - 'end_time': tend, - 'platform_name': 'tirosn', - 'orbit_number': 99999, - 'area': area + "start_time": tstart, + "end_time": tend, + "platform_name": "tirosn", + "orbit_number": 99999, + "area": area } vis006 = xr.DataArray(data_visir, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)}, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_vis006)}, 
attrs={ - 'name': 'image0', 'id_tag': 'ch_r06', - 'coordinates': 'lat lon', 'resolution': 1000, 'calibration': 'reflectance', - 'wavelength': WavelengthRange(min=0.58, central=0.63, max=0.68, unit='µm'), - 'orbital_parameters': { - 'projection_longitude': 1, - 'projection_latitude': 1, - 'projection_altitude': 1, - 'satellite_nominal_longitude': 1, - 'satellite_nominal_latitude': 1, - 'satellite_actual_longitude': 1, - 'satellite_actual_latitude': 1, - 'satellite_actual_altitude': 1, - 'nadir_longitude': 1, - 'nadir_latitude': 1, - 'only_in_1': False + "name": "image0", "id_tag": "ch_r06", + "coordinates": "lat lon", "resolution": 1000, "calibration": "reflectance", + "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), + "orbital_parameters": { + "projection_longitude": 1, + "projection_latitude": 1, + "projection_altitude": 1, + "satellite_nominal_longitude": 1, + "satellite_nominal_latitude": 1, + "satellite_actual_longitude": 1, + "satellite_actual_latitude": 1, + "satellite_actual_altitude": 1, + "nadir_longitude": 1, + "nadir_latitude": 1, + "only_in_1": False } }) ir_108 = xr.DataArray(data_visir, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)}, - attrs={'name': 'image1', 'id_tag': 'ch_tb11', 'coordinates': 'lat lon'}) + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_vis006)}, + attrs={"name": "image1", "id_tag": "ch_tb11", "coordinates": "lat lon"}) qual_f = xr.DataArray(qual_data, - dims=('y', 'z'), - coords={'y': y_visir, 'z': z_visir, 'acq_time': ('y', time_vis006)}, + dims=("y", "z"), + coords={"y": y_visir, "z": z_visir, "acq_time": ("y", time_vis006)}, attrs={ - 'name': 'qual_flags', - 'id_tag': 'qual_flags' + "name": "qual_flags", + "id_tag": "qual_flags" }) lat = xr.DataArray(lat, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir}, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir}, attrs={ - 'name': 'lat', - 'standard_name': 'latitude', - 'modifiers': np.array([]) + "name": "lat", + "standard_name": "latitude", + "modifiers": np.array([]) }) lon = xr.DataArray(lon, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir}, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir}, attrs={ - 'name': 'lon', - 'standard_name': 'longitude', - 'modifiers': np.array([]) + "name": "lon", + "standard_name": "longitude", + "modifiers": np.array([]) }) # for prefix testing prefix_data = xr.DataArray(data_visir, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir}, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir}, attrs={ - 'name': '1', 'id_tag': 'ch_r06', - 'coordinates': 'lat lon', 'resolution': 1000, 'calibration': 'reflectance', - 'wavelength': WavelengthRange(min=0.58, central=0.63, max=0.68, unit='µm'), - 'area': area + "name": "1", "id_tag": "ch_r06", + "coordinates": "lat lon", "resolution": 1000, "calibration": "reflectance", + "wavelength": WavelengthRange(min=0.58, central=0.63, max=0.68, unit="µm"), + "area": area }) # for swath testing area = SwathDefinition(lons=lon, lats=lat) swath_data = prefix_data.copy() - swath_data.attrs.update({'name': 'swath_data', 'area': area}) + swath_data.attrs.update({"name": "swath_data", "area": area}) scene = Scene() - scene.attrs['sensor'] = ['avhrr-1', 'avhrr-2', 'avhrr-3'] + scene.attrs["sensor"] = ["avhrr-1", "avhrr-2", "avhrr-3"] scene_dict = { - 'image0': vis006, - 'image1': ir_108, - 'swath_data': swath_data, - '1': prefix_data, - 'lat': lat, - 'lon': lon, - 'qual_flags': qual_f + "image0": vis006, + "image1": 
ir_108, + "swath_data": swath_data, + "1": prefix_data, + "lat": lat, + "lon": lon, + "qual_flags": qual_f } for key in scene_dict: scene[key] = scene_dict[key] - if key != 'swath_data': + if key != "swath_data": scene[key].attrs.update(common_attrs) return scene -@pytest.fixture -def _nc_filename(tmp_path): +@pytest.fixture() +def nc_filename(tmp_path): now = datetime.utcnow() - filename = f'testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc' + filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc" return str(tmp_path / filename) -@pytest.fixture -def _nc_filename_i(tmp_path): +@pytest.fixture() +def nc_filename_i(tmp_path): now = datetime.utcnow() - filename = f'testingcfwriter{now:%Y%j%H%M%S}-viirs-iband-20201007075915-20201007080744.nc' + filename = f"testingcfwriter{now:%Y%j%H%M%S}-viirs-iband-20201007075915-20201007080744.nc" return str(tmp_path / filename) class TestCFReader: """Test case for CF reader.""" - def test_write_and_read_with_area_definition(self, _cf_scene, _nc_filename): + def test_write_and_read_with_area_definition(self, cf_scene, nc_filename): """Save a dataset with an area definition to file with cf_writer and read the data again.""" - _cf_scene.save_datasets(writer='cf', - filename=_nc_filename, - engine='h5netcdf', - flatten_attrs=True, - pretty=True) - scn_ = Scene(reader='satpy_cf_nc', - filenames=[_nc_filename]) - scn_.load(['image0', 'image1', 'lat']) - np.testing.assert_array_equal(scn_['image0'].data, _cf_scene['image0'].data) - np.testing.assert_array_equal(scn_['lat'].data, _cf_scene['lat'].data) # lat loaded as dataset - np.testing.assert_array_equal(scn_['image0'].coords['lon'], _cf_scene['lon'].data) # lon loded as coord - assert isinstance(scn_['image0'].attrs['wavelength'], WavelengthRange) - expected_area = _cf_scene['image0'].attrs['area'] - actual_area = scn_['image0'].attrs['area'] + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="h5netcdf", + flatten_attrs=True, + pretty=True) + scn_ = Scene(reader="satpy_cf_nc", + filenames=[nc_filename]) + scn_.load(["image0", "image1", "lat"]) + np.testing.assert_array_equal(scn_["image0"].data, cf_scene["image0"].data) + np.testing.assert_array_equal(scn_["lat"].data, cf_scene["lat"].data) # lat loaded as dataset + np.testing.assert_array_equal(scn_["image0"].coords["lon"], cf_scene["lon"].data) # lon loded as coord + assert isinstance(scn_["image0"].attrs["wavelength"], WavelengthRange) + expected_area = cf_scene["image0"].attrs["area"] + actual_area = scn_["image0"].attrs["area"] assert pytest.approx(expected_area.area_extent, 0.000001) == actual_area.area_extent assert expected_area.proj_dict == actual_area.proj_dict assert expected_area.shape == actual_area.shape @@ -252,189 +252,189 @@ def test_write_and_read_with_area_definition(self, _cf_scene, _nc_filename): assert expected_area.description == actual_area.description assert expected_area.proj_dict == actual_area.proj_dict - def test_write_and_read_with_swath_definition(self, _cf_scene, _nc_filename): + def test_write_and_read_with_swath_definition(self, cf_scene, nc_filename): """Save a dataset with a swath definition to file with cf_writer and read the data again.""" - _cf_scene.save_datasets(writer='cf', - filename=_nc_filename, - engine='h5netcdf', - flatten_attrs=True, - pretty=True, - datasets=["swath_data"]) - scn_ = Scene(reader='satpy_cf_nc', - filenames=[_nc_filename]) - scn_.load(['swath_data']) - expected_area = _cf_scene['swath_data'].attrs['area'] - 
actual_area = scn_['swath_data'].attrs['area'] + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="h5netcdf", + flatten_attrs=True, + pretty=True, + datasets=["swath_data"]) + scn_ = Scene(reader="satpy_cf_nc", + filenames=[nc_filename]) + scn_.load(["swath_data"]) + expected_area = cf_scene["swath_data"].attrs["area"] + actual_area = scn_["swath_data"].attrs["area"] assert expected_area.shape == actual_area.shape np.testing.assert_array_equal(expected_area.lons.data, actual_area.lons.data) np.testing.assert_array_equal(expected_area.lats.data, actual_area.lats.data) def test_fix_modifier_attr(self): """Check that fix modifier can handle empty list as modifier attribute.""" - reader = SatpyCFFileHandler('filename', + reader = SatpyCFFileHandler("filename", {}, - {'filetype': 'info'}) - ds_info = {'modifiers': []} + {"filetype": "info"}) + ds_info = {"modifiers": []} reader.fix_modifier_attr(ds_info) - assert ds_info['modifiers'] == () + assert ds_info["modifiers"] == () - def test_read_prefixed_channels(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels(self, cf_scene, nc_filename): """Check channels starting with digit is prefixed and read back correctly.""" - _cf_scene.save_datasets(writer='cf', - filename=_nc_filename, - engine='netcdf4', - flatten_attrs=True, - pretty=True) - scn_ = Scene(reader='satpy_cf_nc', - filenames=[_nc_filename]) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord - - scn_ = Scene(reader='satpy_cf_nc', - filenames=[_nc_filename], reader_kwargs={}) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True) + scn_ = Scene(reader="satpy_cf_nc", + filenames=[nc_filename]) + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loaded as coord + + scn_ = Scene(reader="satpy_cf_nc", + filenames=[nc_filename], reader_kwargs={}) + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loaded as coord # Check that variables starting with a digit is written to filename variable prefixed - with xr.open_dataset(_nc_filename) as ds_disk: - np.testing.assert_array_equal(ds_disk['CHANNEL_1'].data, _cf_scene['1'].data) + with xr.open_dataset(nc_filename) as ds_disk: + np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, cf_scene["1"].data) - def test_read_prefixed_channels_include_orig_name(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels_include_orig_name(self, cf_scene, nc_filename): """Check channels starting with digit and includeed orig name is prefixed and read back correctly.""" - _cf_scene.save_datasets(writer='cf', - filename=_nc_filename, - engine='netcdf4', - flatten_attrs=True, - pretty=True, - include_orig_name=True) - scn_ = Scene(reader='satpy_cf_nc', - filenames=[_nc_filename]) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord - - assert 
scn_['1'].attrs['original_name'] == '1' + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True, + include_orig_name=True) + scn_ = Scene(reader="satpy_cf_nc", + filenames=[nc_filename]) + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loaded as coord + + assert scn_["1"].attrs["original_name"] == "1" # Check that variables starting with a digit is written to filename variable prefixed - with xr.open_dataset(_nc_filename) as ds_disk: - np.testing.assert_array_equal(ds_disk['CHANNEL_1'].data, _cf_scene['1'].data) + with xr.open_dataset(nc_filename) as ds_disk: + np.testing.assert_array_equal(ds_disk["CHANNEL_1"].data, cf_scene["1"].data) - def test_read_prefixed_channels_by_user(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels_by_user(self, cf_scene, nc_filename): """Check channels starting with digit is prefixed by user and read back correctly.""" - _cf_scene.save_datasets(writer='cf', - filename=_nc_filename, - engine='netcdf4', - flatten_attrs=True, - pretty=True, - numeric_name_prefix='USER') - scn_ = Scene(reader='satpy_cf_nc', - filenames=[_nc_filename], reader_kwargs={'numeric_name_prefix': 'USER'}) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loded as coord + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True, + numeric_name_prefix="USER") + scn_ = Scene(reader="satpy_cf_nc", + filenames=[nc_filename], reader_kwargs={"numeric_name_prefix": "USER"}) + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loded as coord # Check that variables starting with a digit is written to filename variable prefixed - with xr.open_dataset(_nc_filename) as ds_disk: - np.testing.assert_array_equal(ds_disk['USER1'].data, _cf_scene['1'].data) + with xr.open_dataset(nc_filename) as ds_disk: + np.testing.assert_array_equal(ds_disk["USER1"].data, cf_scene["1"].data) - def test_read_prefixed_channels_by_user2(self, _cf_scene, _nc_filename): + def test_read_prefixed_channels_by_user2(self, cf_scene, nc_filename): """Check channels starting with digit is prefixed by user when saving and read back correctly without prefix.""" - _cf_scene.save_datasets(writer='cf', - filename=_nc_filename, - engine='netcdf4', - flatten_attrs=True, - pretty=True, - include_orig_name=False, - numeric_name_prefix='USER') - scn_ = Scene(reader='satpy_cf_nc', - filenames=[_nc_filename]) - scn_.load(['USER1']) - np.testing.assert_array_equal(scn_['USER1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['USER1'].coords['lon'], _cf_scene['lon'].data) # lon loded as coord - - def test_read_prefixed_channels_by_user_include_prefix(self, _cf_scene, _nc_filename): + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True, + include_orig_name=False, + numeric_name_prefix="USER") + scn_ = Scene(reader="satpy_cf_nc", + filenames=[nc_filename]) + scn_.load(["USER1"]) + np.testing.assert_array_equal(scn_["USER1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["USER1"].coords["lon"], cf_scene["lon"].data) # lon loded as coord + + def 
test_read_prefixed_channels_by_user_include_prefix(self, cf_scene, nc_filename): """Check channels starting with digit is prefixed by user and include original name when saving.""" - _cf_scene.save_datasets(writer='cf', - filename=_nc_filename, - engine='netcdf4', - flatten_attrs=True, - pretty=True, - include_orig_name=True, - numeric_name_prefix='USER') - scn_ = Scene(reader='satpy_cf_nc', - filenames=[_nc_filename]) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loded as coord - - def test_read_prefixed_channels_by_user_no_prefix(self, _cf_scene, _nc_filename): + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True, + include_orig_name=True, + numeric_name_prefix="USER") + scn_ = Scene(reader="satpy_cf_nc", + filenames=[nc_filename]) + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loded as coord + + def test_read_prefixed_channels_by_user_no_prefix(self, cf_scene, nc_filename): """Check channels starting with digit is not prefixed by user.""" with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning, message=".*starts with a digit.*") - _cf_scene.save_datasets(writer='cf', - filename=_nc_filename, - engine='netcdf4', - flatten_attrs=True, - pretty=True, - numeric_name_prefix='') - scn_ = Scene(reader='satpy_cf_nc', - filenames=[_nc_filename]) - scn_.load(['1']) - np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data) - np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loded as coord - - def test_orbital_parameters(self, _cf_scene, _nc_filename): + cf_scene.save_datasets(writer="cf", + filename=nc_filename, + engine="netcdf4", + flatten_attrs=True, + pretty=True, + numeric_name_prefix="") + scn_ = Scene(reader="satpy_cf_nc", + filenames=[nc_filename]) + scn_.load(["1"]) + np.testing.assert_array_equal(scn_["1"].data, cf_scene["1"].data) + np.testing.assert_array_equal(scn_["1"].coords["lon"], cf_scene["lon"].data) # lon loded as coord + + def test_orbital_parameters(self, cf_scene, nc_filename): """Test that the orbital parameters in attributes are handled correctly.""" - _cf_scene.save_datasets(writer='cf', - filename=_nc_filename) - scn_ = Scene(reader='satpy_cf_nc', - filenames=[_nc_filename]) - scn_.load(['image0']) - orig_attrs = _cf_scene['image0'].attrs['orbital_parameters'] - new_attrs = scn_['image0'].attrs['orbital_parameters'] + cf_scene.save_datasets(writer="cf", + filename=nc_filename) + scn_ = Scene(reader="satpy_cf_nc", + filenames=[nc_filename]) + scn_.load(["image0"]) + orig_attrs = cf_scene["image0"].attrs["orbital_parameters"] + new_attrs = scn_["image0"].attrs["orbital_parameters"] assert isinstance(new_attrs, dict) for key in orig_attrs: assert orig_attrs[key] == new_attrs[key] - def test_write_and_read_from_two_files(self, _nc_filename, _nc_filename_i): + def test_write_and_read_from_two_files(self, nc_filename, nc_filename_i): """Save two datasets with different resolution and read the solar_zenith_angle again.""" - _create_test_netcdf(_nc_filename, resolution=742) - _create_test_netcdf(_nc_filename_i, resolution=371) - scn_ = Scene(reader='satpy_cf_nc', - filenames=[_nc_filename, _nc_filename_i]) - scn_.load(['solar_zenith_angle'], resolution=742) - assert 
scn_['solar_zenith_angle'].attrs['resolution'] == 742 + _create_test_netcdf(nc_filename, resolution=742) + _create_test_netcdf(nc_filename_i, resolution=371) + scn_ = Scene(reader="satpy_cf_nc", + filenames=[nc_filename, nc_filename_i]) + scn_.load(["solar_zenith_angle"], resolution=742) + assert scn_["solar_zenith_angle"].attrs["resolution"] == 742 scn_.unload() - scn_.load(['solar_zenith_angle'], resolution=371) - assert scn_['solar_zenith_angle'].attrs['resolution'] == 371 + scn_.load(["solar_zenith_angle"], resolution=371) + assert scn_["solar_zenith_angle"].attrs["resolution"] == 371 - def test_dataid_attrs_equal_matching_dataset(self, _cf_scene, _nc_filename): + def test_dataid_attrs_equal_matching_dataset(self, cf_scene, nc_filename): """Check that get_dataset returns valid dataset when keys matches.""" from satpy.dataset.dataid import DataID, default_id_keys_config - _create_test_netcdf(_nc_filename, resolution=742) - reader = SatpyCFFileHandler(_nc_filename, {}, {'filetype': 'info'}) - ds_id = DataID(default_id_keys_config, name='solar_zenith_angle', resolution=742, modifiers=()) + _create_test_netcdf(nc_filename, resolution=742) + reader = SatpyCFFileHandler(nc_filename, {}, {"filetype": "info"}) + ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=742, modifiers=()) res = reader.get_dataset(ds_id, {}) - assert res.attrs['resolution'] == 742 + assert res.attrs["resolution"] == 742 - def test_dataid_attrs_equal_not_matching_dataset(self, _cf_scene, _nc_filename): + def test_dataid_attrs_equal_not_matching_dataset(self, cf_scene, nc_filename): """Check that get_dataset returns None when key(s) are not matching.""" from satpy.dataset.dataid import DataID, default_id_keys_config - _create_test_netcdf(_nc_filename, resolution=742) - reader = SatpyCFFileHandler(_nc_filename, {}, {'filetype': 'info'}) + _create_test_netcdf(nc_filename, resolution=742) + reader = SatpyCFFileHandler(nc_filename, {}, {"filetype": "info"}) not_existing_resolution = 9999999 - ds_id = DataID(default_id_keys_config, name='solar_zenith_angle', resolution=not_existing_resolution, + ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=not_existing_resolution, modifiers=()) assert reader.get_dataset(ds_id, {}) is None - def test_dataid_attrs_equal_contains_not_matching_key(self, _cf_scene, _nc_filename): + def test_dataid_attrs_equal_contains_not_matching_key(self, cf_scene, nc_filename): """Check that get_dataset returns valid dataset when dataid have key(s) not existing in data.""" from satpy.dataset.dataid import DataID, default_id_keys_config - _create_test_netcdf(_nc_filename, resolution=742) - reader = SatpyCFFileHandler(_nc_filename, {}, {'filetype': 'info'}) - ds_id = DataID(default_id_keys_config, name='solar_zenith_angle', resolution=742, - modifiers=(), calibration='counts') + _create_test_netcdf(nc_filename, resolution=742) + reader = SatpyCFFileHandler(nc_filename, {}, {"filetype": "info"}) + ds_id = DataID(default_id_keys_config, name="solar_zenith_angle", resolution=742, + modifiers=(), calibration="counts") res = reader.get_dataset(ds_id, {}) - assert res.attrs['resolution'] == 742 + assert res.attrs["resolution"] == 742 diff --git a/satpy/tests/reader_tests/test_scmi.py b/satpy/tests/reader_tests/test_scmi.py index 6d42720c8d..89eda0479a 100644 --- a/satpy/tests/reader_tests/test_scmi.py +++ b/satpy/tests/reader_tests/test_scmi.py @@ -56,7 +56,7 @@ def close(self): class TestSCMIFileHandler(unittest.TestCase): """Test the SCMIFileHandler reader.""" 
- @mock.patch('satpy.readers.scmi.xr') + @mock.patch("satpy.readers.scmi.xr") def setUp(self, xr_): """Set up for test.""" from satpy.readers.scmi import SCMIFileHandler @@ -68,213 +68,209 @@ def setUp(self, xr_): time = xr.DataArray(0.) rad = xr.DataArray( rad_data, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'scale_factor': 0.5, - 'add_offset': -1., - '_FillValue': 20, - 'standard_name': 'toa_bidirectional_reflectance', + "scale_factor": 0.5, + "add_offset": -1., + "_FillValue": 20, + "standard_name": "toa_bidirectional_reflectance", }, coords={ - 'time': time, + "time": time, } ) xr_.open_dataset.return_value = FakeDataset( { - 'Sectorized_CMI': rad, + "Sectorized_CMI": rad, "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), }, { - 'start_date_time': "2017210120000", - 'satellite_id': 'GOES-16', - 'satellite_longitude': -90., - 'satellite_latitude': 0., - 'satellite_altitude': 35785831., + "start_date_time": "2017210120000", + "satellite_id": "GOES-16", + "satellite_longitude": -90., + "satellite_latitude": 0., + "satellite_altitude": 35785831., }, - {'y': 2, 'x': 5}, + {"y": 2, "x": 5}, ) - self.reader = SCMIFileHandler('filename', - {'platform_shortname': 'G16'}, - {'filetype': 'info'}) + self.reader = SCMIFileHandler("filename", + {"platform_shortname": "G16"}, + {"filetype": "info"}) def test_basic_attributes(self): """Test getting basic file attributes.""" from datetime import datetime from satpy.tests.utils import make_dataid - self.assertEqual(self.reader.start_time, - datetime(2017, 7, 29, 12, 0, 0, 0)) - self.assertEqual(self.reader.end_time, - datetime(2017, 7, 29, 12, 0, 0, 0)) - self.assertEqual(self.reader.get_shape(make_dataid(name='C05'), {}), - (2, 5)) + assert self.reader.start_time == datetime(2017, 7, 29, 12, 0, 0, 0) + assert self.reader.end_time == datetime(2017, 7, 29, 12, 0, 0, 0) + assert self.reader.get_shape(make_dataid(name="C05"), {}) == (2, 5) def test_data_load(self): """Test data loading.""" from satpy.tests.utils import make_dataid res = self.reader.get_dataset( - make_dataid(name='C05', calibration='reflectance'), {}) + make_dataid(name="C05", calibration="reflectance"), {}) np.testing.assert_allclose(res.data, self.expected_rad, equal_nan=True) - self.assertNotIn('scale_factor', res.attrs) - self.assertNotIn('_FillValue', res.attrs) - self.assertEqual(res.attrs['standard_name'], - 'toa_bidirectional_reflectance') - assert 'orbital_parameters' in res.attrs - orb_params = res.attrs['orbital_parameters'] - assert orb_params['projection_longitude'] == -90.0 - assert orb_params['projection_latitude'] == 0.0 - assert orb_params['projection_altitude'] == 35785831.0 + assert "scale_factor" not in res.attrs + assert "_FillValue" not in res.attrs + assert res.attrs["standard_name"] == "toa_bidirectional_reflectance" + assert "orbital_parameters" in res.attrs + orb_params = res.attrs["orbital_parameters"] + assert orb_params["projection_longitude"] == -90.0 + assert orb_params["projection_latitude"] == 0.0 + assert orb_params["projection_altitude"] == 35785831.0 class TestSCMIFileHandlerArea(unittest.TestCase): """Test the SCMIFileHandler's area creation.""" - @mock.patch('satpy.readers.scmi.xr') + @mock.patch("satpy.readers.scmi.xr") def create_reader(self, proj_name, proj_attrs, xr_): """Create a fake reader.""" from satpy.readers.scmi import SCMIFileHandler proj = xr.DataArray([], attrs=proj_attrs) x__ = xr.DataArray( [0, 1], - attrs={'scale_factor': 2., 
'add_offset': -1., 'units': 'meters'}, + attrs={"scale_factor": 2., "add_offset": -1., "units": "meters"}, ) y__ = xr.DataArray( [0, 1], - attrs={'scale_factor': -2., 'add_offset': 1., 'units': 'meters'}, + attrs={"scale_factor": -2., "add_offset": 1., "units": "meters"}, ) xr_.open_dataset.return_value = FakeDataset({ - 'goes_imager_projection': proj, - 'x': x__, - 'y': y__, - 'Sectorized_CMI': np.ones((2, 2))}, + "goes_imager_projection": proj, + "x": x__, + "y": y__, + "Sectorized_CMI": np.ones((2, 2))}, { - 'satellite_id': 'GOES-16', - 'grid_mapping': proj_name, + "satellite_id": "GOES-16", + "grid_mapping": proj_name, }, { - 'y': y__.size, - 'x': x__.size, + "y": y__.size, + "x": x__.size, } ) - return SCMIFileHandler('filename', - {'platform_shortname': 'G16'}, - {'filetype': 'info'}) + return SCMIFileHandler("filename", + {"platform_shortname": "G16"}, + {"filetype": "info"}) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_geos(self, adef): """Test the area generation for geos projection.""" reader = self.create_reader( - 'goes_imager_projection', + "goes_imager_projection", { - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'perspective_point_height': 1., - 'longitude_of_projection_origin': -90., - 'sweep_angle_axis': u'x', - 'grid_mapping_name': 'geostationary', + "semi_major_axis": 1., + "semi_minor_axis": 1., + "perspective_point_height": 1., + "longitude_of_projection_origin": -90., + "sweep_angle_axis": u"x", + "grid_mapping_name": "geostationary", } ) reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], { - 'a': 1.0, 'b': 1.0, 'h': 1.0, 'lon_0': -90.0, 'lat_0': 0.0, - 'proj': 'geos', 'sweep': 'x', 'units': 'm'}) - self.assertEqual(call_args[4], reader.ncols) - self.assertEqual(call_args[5], reader.nlines) + assert call_args[3] == {"a": 1.0, "b": 1.0, "h": 1.0, + "lon_0": -90.0, "lat_0": 0.0, + "proj": "geos", "sweep": "x", "units": "m"} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_lcc(self, adef): """Test the area generation for lcc projection.""" reader = self.create_reader( - 'goes_imager_projection', + "goes_imager_projection", { - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'longitude_of_central_meridian': -90., - 'standard_parallel': 25., - 'latitude_of_projection_origin': 25., - 'grid_mapping_name': 'lambert_conformal_conic', + "semi_major_axis": 1., + "semi_minor_axis": 1., + "longitude_of_central_meridian": -90., + "standard_parallel": 25., + "latitude_of_projection_origin": 25., + "grid_mapping_name": "lambert_conformal_conic", } ) reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], { - 'a': 1.0, 'b': 1.0, 'lon_0': -90.0, 'lat_0': 25.0, 'lat_1': 25.0, - 'proj': 'lcc', 'units': 'm'}) - self.assertEqual(call_args[4], reader.ncols) - self.assertEqual(call_args[5], reader.nlines) + assert call_args[3] == {"a": 1.0, "b": 1.0, + "lon_0": -90.0, "lat_0": 25.0, "lat_1": 25.0, + "proj": "lcc", "units": "m"} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines 
np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_stere(self, adef): """Test the area generation for stere projection.""" reader = self.create_reader( - 'goes_imager_projection', + "goes_imager_projection", { - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'straight_vertical_longitude_from_pole': -90., - 'standard_parallel': 60., - 'latitude_of_projection_origin': 90., - 'grid_mapping_name': 'polar_stereographic', + "semi_major_axis": 1., + "semi_minor_axis": 1., + "straight_vertical_longitude_from_pole": -90., + "standard_parallel": 60., + "latitude_of_projection_origin": 90., + "grid_mapping_name": "polar_stereographic", } ) reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], { - 'a': 1.0, 'b': 1.0, 'lon_0': -90.0, 'lat_0': 90.0, 'lat_ts': 60.0, - 'proj': 'stere', 'units': 'm'}) - self.assertEqual(call_args[4], reader.ncols) - self.assertEqual(call_args[5], reader.nlines) + assert call_args[3] == {"a": 1.0, "b": 1.0, + "lon_0": -90.0, "lat_0": 90.0, "lat_ts": 60.0, + "proj": "stere", "units": "m"} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_merc(self, adef): """Test the area generation for merc projection.""" reader = self.create_reader( - 'goes_imager_projection', + "goes_imager_projection", { - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'longitude_of_projection_origin': -90., - 'standard_parallel': 0., - 'grid_mapping_name': 'mercator', + "semi_major_axis": 1., + "semi_minor_axis": 1., + "longitude_of_projection_origin": -90., + "standard_parallel": 0., + "grid_mapping_name": "mercator", } ) reader.get_area_def(None) - self.assertEqual(adef.call_count, 1) + assert adef.call_count == 1 call_args = tuple(adef.call_args)[0] - self.assertDictEqual(call_args[3], { - 'a': 1.0, 'b': 1.0, 'lon_0': -90.0, 'lat_0': 0.0, 'lat_ts': 0.0, - 'proj': 'merc', 'units': 'm'}) - self.assertEqual(call_args[4], reader.ncols) - self.assertEqual(call_args[5], reader.nlines) + assert call_args[3] == {"a": 1.0, "b": 1.0, + "lon_0": -90.0, "lat_0": 0.0, "lat_ts": 0.0, + "proj": "merc", "units": "m"} + assert call_args[4] == reader.ncols + assert call_args[5] == reader.nlines np.testing.assert_allclose(call_args[6], (-2., -2., 2, 2.)) - @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition') + @mock.patch("satpy.readers.abi_base.geometry.AreaDefinition") def test_get_area_def_bad(self, adef): """Test the area generation for bad projection.""" reader = self.create_reader( - 'goes_imager_projection', + "goes_imager_projection", { - 'semi_major_axis': 1., - 'semi_minor_axis': 1., - 'longitude_of_projection_origin': -90., - 'standard_parallel': 0., - 'grid_mapping_name': 'fake', + "semi_major_axis": 1., + "semi_minor_axis": 1., + "longitude_of_projection_origin": -90., + "standard_parallel": 0., + "grid_mapping_name": "fake", } ) self.assertRaises(ValueError, reader.get_area_def, None) diff --git a/satpy/tests/reader_tests/test_seadas_l2.py b/satpy/tests/reader_tests/test_seadas_l2.py index c56fe42b33..01de26e96b 100644 --- a/satpy/tests/reader_tests/test_seadas_l2.py +++ 
b/satpy/tests/reader_tests/test_seadas_l2.py @@ -112,13 +112,13 @@ def _create_seadas_chlor_a_hdf4_file(full_path, mission, sensor): def _add_variable_to_hdf4_file(h, var_name, var_info): - v = h.create(var_name, var_info['type'], var_info['data'].shape) - v[:] = var_info['data'] - for dim_count, dimension_name in enumerate(var_info['dim_labels']): + v = h.create(var_name, var_info["type"], var_info["data"].shape) + v[:] = var_info["data"] + for dim_count, dimension_name in enumerate(var_info["dim_labels"]): v.dim(dim_count).setname(dimension_name) - if var_info.get('fill_value'): - v.setfillvalue(var_info['fill_value']) - for attr_key, attr_val in var_info['attrs'].items(): + if var_info.get("fill_value"): + v.setfillvalue(var_info["fill_value"]) + for attr_key, attr_val in var_info["attrs"].items(): setattr(v, attr_key, attr_val) @@ -196,8 +196,8 @@ def _create_seadas_chlor_a_netcdf_file(full_path, mission, sensor): def _add_variable_to_netcdf_file(nc, var_name, var_info): v = nc.createVariable(var_name, var_info["data"].dtype.str[1:], dimensions=var_info["dim_labels"], fill_value=var_info.get("fill_value")) - v[:] = var_info['data'] - for attr_key, attr_val in var_info['attrs'].items(): + v[:] = var_info["data"] + for attr_key, attr_val in var_info["attrs"].items(): setattr(v, attr_key, attr_val) @@ -206,7 +206,7 @@ class TestSEADAS: def test_available_reader(self): """Test that SEADAS L2 reader is available.""" - assert 'seadas_l2' in available_readers() + assert "seadas_l2" in available_readers() @pytest.mark.parametrize( "input_files", @@ -217,10 +217,10 @@ def test_available_reader(self): ]) def test_scene_available_datasets(self, input_files): """Test that datasets are available.""" - scene = Scene(reader='seadas_l2', filenames=input_files) + scene = Scene(reader="seadas_l2", filenames=input_files) available_datasets = scene.all_dataset_names() assert len(available_datasets) > 0 - assert 'chlor_a' in available_datasets + assert "chlor_a" in available_datasets @pytest.mark.parametrize( ("input_files", "exp_plat", "exp_sensor", "exp_rps"), @@ -234,13 +234,13 @@ def test_scene_available_datasets(self, input_files): def test_load_chlor_a(self, input_files, exp_plat, exp_sensor, exp_rps, apply_quality_flags): """Test that we can load 'chlor_a'.""" reader_kwargs = {"apply_quality_flags": apply_quality_flags} - scene = Scene(reader='seadas_l2', filenames=input_files, reader_kwargs=reader_kwargs) - scene.load(['chlor_a']) - data_arr = scene['chlor_a'] + scene = Scene(reader="seadas_l2", filenames=input_files, reader_kwargs=reader_kwargs) + scene.load(["chlor_a"]) + data_arr = scene["chlor_a"] assert data_arr.dims == ("y", "x") - assert data_arr.attrs['platform_name'] == exp_plat - assert data_arr.attrs['sensor'] == exp_sensor - assert data_arr.attrs['units'] == 'mg m^-3' + assert data_arr.attrs["platform_name"] == exp_plat + assert data_arr.attrs["sensor"] == exp_sensor + assert data_arr.attrs["units"] == "mg m^-3" assert data_arr.dtype.type == np.float32 assert isinstance(data_arr.attrs["area"], SwathDefinition) assert data_arr.attrs["rows_per_scan"] == exp_rps diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py index 32918ea45b..ced24a77ea 100644 --- a/satpy/tests/reader_tests/test_seviri_base.py +++ b/satpy/tests/reader_tests/test_seviri_base.py @@ -77,25 +77,24 @@ def test_chebyshev(self): def test_get_cds_time(self): """Test the get_cds_time function.""" # Scalar - self.assertEqual(get_cds_time(days=21246, msecs=12*3600*1000), - 
np.datetime64('2016-03-03 12:00')) + assert get_cds_time(days=21246, msecs=12 * 3600 * 1000) == np.datetime64("2016-03-03 12:00") # Array days = np.array([21246, 21247, 21248]) msecs = np.array([12*3600*1000, 13*3600*1000 + 1, 14*3600*1000 + 2]) - expected = np.array([np.datetime64('2016-03-03 12:00:00.000'), - np.datetime64('2016-03-04 13:00:00.001'), - np.datetime64('2016-03-05 14:00:00.002')]) + expected = np.array([np.datetime64("2016-03-03 12:00:00.000"), + np.datetime64("2016-03-04 13:00:00.001"), + np.datetime64("2016-03-05 14:00:00.002")]) np.testing.assert_equal(get_cds_time(days=days, msecs=msecs), expected) days = 21246 msecs = 12*3600*1000 - expected = np.datetime64('2016-03-03 12:00:00.000') + expected = np.datetime64("2016-03-03 12:00:00.000") np.testing.assert_equal(get_cds_time(days=days, msecs=msecs), expected) def test_pad_data_horizontally_bad_shape(self): """Test the error handling for the horizontal hrv padding.""" - data = xr.DataArray(data=np.zeros((1, 10)), dims=('y', 'x')) + data = xr.DataArray(data=np.zeros((1, 10)), dims=("y", "x")) east_bound = 5 west_bound = 10 final_size = (1, 20) @@ -104,7 +103,7 @@ def test_pad_data_horizontally_bad_shape(self): def test_pad_data_vertically_bad_shape(self): """Test the error handling for the vertical hrv padding.""" - data = xr.DataArray(data=np.zeros((10, 1)), dims=('y', 'x')) + data = xr.DataArray(data=np.zeros((10, 1)), dims=("y", "x")) south_bound = 5 north_bound = 10 final_size = (20, 1) @@ -121,23 +120,15 @@ def observation_end_time(self): def test_round_nom_time(self): """Test the rouding of start/end_time.""" - self.assertEqual(round_nom_time( - dt=self.observation_start_time(), - time_delta=timedelta(minutes=15) - ), - datetime(2023, 3, 20, 15, 0) - ) - self.assertEqual(round_nom_time( - dt=self.observation_end_time(), - time_delta=timedelta(minutes=15) - ), - datetime(2023, 3, 20, 15, 15) - ) + assert round_nom_time(dt=self.observation_start_time(), + time_delta=timedelta(minutes=15)) == datetime(2023, 3, 20, 15, 0) + assert round_nom_time(dt=self.observation_end_time(), + time_delta=timedelta(minutes=15)) == datetime(2023, 3, 20, 15, 15) @staticmethod def test_pad_data_horizontally(): """Test the horizontal hrv padding.""" - data = xr.DataArray(data=np.zeros((1, 10)), dims=('y', 'x')) + data = xr.DataArray(data=np.zeros((1, 10)), dims=("y", "x")) east_bound = 4 west_bound = 13 final_size = (1, 20) @@ -149,7 +140,7 @@ def test_pad_data_horizontally(): @staticmethod def test_pad_data_vertically(): """Test the vertical hrv padding.""" - data = xr.DataArray(data=np.zeros((10, 1)), dims=('y', 'x')) + data = xr.DataArray(data=np.zeros((10, 1)), dims=("y", "x")) south_bound = 4 north_bound = 13 final_size = (20, 1) @@ -179,30 +170,30 @@ def test_get_padding_area_int(): ORBIT_POLYNOMIALS = { - 'StartTime': np.array([ + "StartTime": np.array([ [ datetime(2006, 1, 1, 6), datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), datetime(1958, 1, 1, 0)] ]), - 'EndTime': np.array([ + "EndTime": np.array([ [ datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), datetime(2006, 1, 2, 0), datetime(1958, 1, 1, 0) ] ]), - 'X': [np.zeros(8), + "X": [np.zeros(8), [8.41607082e+04, 2.94319260e+00, 9.86748617e-01, -2.70135453e-01, -3.84364650e-02, 8.48718433e-03, 7.70548174e-04, -1.44262718e-04], np.zeros(8)], - 'Y': [np.zeros(8), + "Y": [np.zeros(8), [-5.21170255e+03, 5.12998948e+00, -1.33370453e+00, -3.09634144e-01, 6.18232793e-02, 7.50505681e-03, -1.35131011e-03, -1.12054405e-04], np.zeros(8)], - 'Z': [np.zeros(8), + "Z": [np.zeros(8), 
[-6.51293855e+02, 1.45830459e+02, 5.61379400e+01, -3.90970565e+00, -7.38137565e-01, 3.06131644e-02, 3.82892428e-03, @@ -214,7 +205,7 @@ def test_get_padding_area_int(): # 01-01: Small gap (12:00 - 13:00) # 01-02: Large gap (04:00 - 18:00) # 01-03: Overlap (10:00 - 13:00) - 'StartTime': np.array([ + "StartTime": np.array([ [ datetime(2005, 12, 31, 10), datetime(2005, 12, 31, 12), datetime(2006, 1, 1, 10), datetime(2006, 1, 1, 13), @@ -222,7 +213,7 @@ def test_get_padding_area_int(): datetime(2006, 1, 3, 6), datetime(2006, 1, 3, 10), ] ]), - 'EndTime': np.array([ + "EndTime": np.array([ [ datetime(2005, 12, 31, 12), datetime(2005, 12, 31, 18), datetime(2006, 1, 1, 12), datetime(2006, 1, 1, 18), @@ -230,31 +221,31 @@ def test_get_padding_area_int(): datetime(2006, 1, 3, 13), datetime(2006, 1, 3, 18), ] ]), - 'X': [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0], - 'Y': [1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 8.1], - 'Z': [1.2, 2.2, 3.2, 4.2, 5.2, 6.2, 7.2, 8.2], + "X": [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0], + "Y": [1.1, 2.1, 3.1, 4.1, 5.1, 6.1, 7.1, 8.1], + "Z": [1.2, 2.2, 3.2, 4.2, 5.2, 6.2, 7.2, 8.2], } ORBIT_POLYNOMIALS_INVALID = { - 'StartTime': np.array([ + "StartTime": np.array([ [ datetime(1958, 1, 1), datetime(1958, 1, 1) ] ]), - 'EndTime': np.array([ + "EndTime": np.array([ [ datetime(1958, 1, 1), datetime(1958, 1, 1) ] ]), - 'X': [1, 2], - 'Y': [3, 4], - 'Z': [5, 6], + "X": [1, 2], + "Y": [3, 4], + "Z": [5, 6], } class TestSatellitePosition: """Test locating the satellite.""" - @pytest.fixture + @pytest.fixture() def orbit_polynomial(self): """Get an orbit polynomial for testing.""" return OrbitPolynomial( @@ -273,7 +264,7 @@ def orbit_polynomial(self): ) ) - @pytest.fixture + @pytest.fixture() def time(self): """Get scan timestamp for testing.""" return datetime(2006, 1, 1, 12, 15, 9, 304888) @@ -304,7 +295,7 @@ class TestOrbitPolynomialFinder: """Unit tests for orbit polynomial finder.""" @pytest.mark.parametrize( - ('orbit_polynomials', 'time', 'orbit_polynomial_exp'), + ("orbit_polynomials", "time", "orbit_polynomial_exp"), [ # Contiguous validity intervals (that's the norm) ( @@ -312,8 +303,8 @@ class TestOrbitPolynomialFinder: datetime(2005, 12, 31, 12, 15), OrbitPolynomial( coefs=(2.0, 2.1, 2.2), - start_time=np.datetime64('2005-12-31 12:00'), - end_time=np.datetime64('2005-12-31 18:00') + start_time=np.datetime64("2005-12-31 12:00"), + end_time=np.datetime64("2005-12-31 18:00") ) ), # No interval enclosing the given timestamp, but closest interval @@ -323,8 +314,8 @@ class TestOrbitPolynomialFinder: datetime(2006, 1, 1, 12, 15), OrbitPolynomial( coefs=(3.0, 3.1, 3.2), - start_time=np.datetime64('2006-01-01 10:00'), - end_time=np.datetime64('2006-01-01 12:00') + start_time=np.datetime64("2006-01-01 10:00"), + end_time=np.datetime64("2006-01-01 12:00") ) ), # Overlapping intervals @@ -333,8 +324,8 @@ class TestOrbitPolynomialFinder: datetime(2006, 1, 3, 12, 15), OrbitPolynomial( coefs=(8.0, 8.1, 8.2), - start_time=np.datetime64('2006-01-03 10:00'), - end_time=np.datetime64('2006-01-03 18:00') + start_time=np.datetime64("2006-01-03 10:00"), + end_time=np.datetime64("2006-01-03 18:00") ) ), ] @@ -347,7 +338,7 @@ def test_get_orbit_polynomial(self, orbit_polynomials, time, assert orbit_polynomial == orbit_polynomial_exp @pytest.mark.parametrize( - ('orbit_polynomials', 'time'), + ("orbit_polynomials", "time"), [ # No interval enclosing the given timestamp and closest interval # too far away @@ -366,17 +357,17 @@ def test_get_orbit_polynomial_exceptions(self, orbit_polynomials, time): 
class TestMeirinkSlope: """Unit tests for the slope of Meirink calibration.""" - @pytest.mark.parametrize('platform_id', [321, 322, 323, 324]) - @pytest.mark.parametrize('channel_name', ['VIS006', 'VIS008', 'IR_016']) + @pytest.mark.parametrize("platform_id", [321, 322, 323, 324]) + @pytest.mark.parametrize("channel_name", ["VIS006", "VIS008", "IR_016"]) def test_get_meirink_slope_epoch(self, platform_id, channel_name): """Test the value of the slope of the Meirink calibration on 2000-01-01.""" - coefs = {'coefs': {}} - coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} - coefs['coefs']['EXTERNAL'] = {} - calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2023', MEIRINK_EPOCH) - assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS['2023'][platform_id][channel_name][0]/1000. + coefs = {"coefs": {}} + coefs["coefs"]["NOMINAL"] = {"gain": -1, "offset": -1} + coefs["coefs"]["EXTERNAL"] = {} + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, "MEIRINK-2023", MEIRINK_EPOCH) + assert calibration_handler.get_gain_offset()[0] == MEIRINK_COEFS["2023"][platform_id][channel_name][0]/1000. - @pytest.mark.parametrize('platform_id,time,expected', ( + @pytest.mark.parametrize(("platform_id", "time", "expected"), [ (321, datetime(2005, 1, 18, 0, 0), [0.0250354716, 0.0315626684, 0.022880986]), (321, datetime(2010, 12, 31, 0, 0), [0.0258479563, 0.0322386887, 0.022895110500000003]), (322, datetime(2010, 1, 18, 0, 0), [0.021964051999999998, 0.027548445, 0.021576766]), @@ -385,12 +376,12 @@ def test_get_meirink_slope_epoch(self, platform_id, channel_name): (323, datetime(2010, 12, 31, 0, 0), [0.022181355200000002, 0.0280103379, 0.0229511138]), (324, datetime(2010, 1, 18, 0, 0), [0.0218362, 0.027580748, 0.022285370999999998]), (324, datetime(2015, 6, 1, 0, 0), [0.0225418, 0.028530172, 0.022248718999999997]), - )) + ]) def test_get_meirink_slope_2020(self, platform_id, time, expected): """Test the value of the slope of the Meirink calibration.""" - coefs = {'coefs': {}} - coefs['coefs']['NOMINAL'] = {'gain': -1, 'offset': -1} - coefs['coefs']['EXTERNAL'] = {} - for i, channel_name in enumerate(['VIS006', 'VIS008', 'IR_016']): - calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, 'MEIRINK-2023', time) + coefs = {"coefs": {}} + coefs["coefs"]["NOMINAL"] = {"gain": -1, "offset": -1} + coefs["coefs"]["EXTERNAL"] = {} + for i, channel_name in enumerate(["VIS006", "VIS008", "IR_016"]): + calibration_handler = SEVIRICalibrationHandler(platform_id, channel_name, coefs, "MEIRINK-2023", time) assert abs(calibration_handler.get_gain_offset()[0] - expected[i]) < 1e-6 diff --git a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py index fdcedea3f2..d46af5abd2 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py @@ -53,7 +53,7 @@ CAL_TYPE1 = 1 CAL_TYPE2 = 2 CAL_TYPEBAD = -1 -CHANNEL_NAME = 'IR_108' +CHANNEL_NAME = "IR_108" PLATFORM_ID = 323 # Met-10 TBS_OUTPUT1 = xr.DataArray( @@ -117,14 +117,14 @@ def test_convert_to_radiance(self): """Test the conversion from counts to radiances.""" result = self.algo.convert_to_radiance(COUNTS_INPUT, GAIN, OFFSET) xr.testing.assert_allclose(result, RADIANCES_OUTPUT) - self.assertEqual(result.dtype, np.float32) + assert result.dtype == np.float32 def test_ir_calibrate(self): """Test conversion from radiance to brightness temperature.""" 
result = self.algo.ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPE1) xr.testing.assert_allclose(result, TBS_OUTPUT1, rtol=1E-5) - self.assertEqual(result.dtype, np.float32) + assert result.dtype == np.float32 result = self.algo.ir_calibrate(RADIANCES_OUTPUT, CHANNEL_NAME, CAL_TYPE2) @@ -138,8 +138,8 @@ def test_vis_calibrate(self): result = self.algo.vis_calibrate(VIS008_RADIANCE, VIS008_SOLAR_IRRADIANCE) xr.testing.assert_allclose(result, VIS008_REFLECTANCE) - self.assertTrue(result.sun_earth_distance_correction_applied) - self.assertEqual(result.dtype, np.float32) + assert result.sun_earth_distance_correction_applied + assert result.dtype == np.float32 class TestSeviriCalibrationHandler: @@ -147,33 +147,33 @@ class TestSeviriCalibrationHandler: def test_init(self): """Test initialization of the calibration handler.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid calibration mode: INVALID. Choose one of (.*)"): SEVIRICalibrationHandler( platform_id=None, channel_name=None, coefs=None, - calib_mode='invalid', + calib_mode="invalid", scan_time=None ) - def _get_calibration_handler(self, calib_mode='NOMINAL', ext_coefs=None): + def _get_calibration_handler(self, calib_mode="NOMINAL", ext_coefs=None): """Provide a calibration handler.""" return SEVIRICalibrationHandler( platform_id=324, - channel_name='IR_108', + channel_name="IR_108", coefs={ - 'coefs': { - 'NOMINAL': { - 'gain': 10, - 'offset': -1 + "coefs": { + "NOMINAL": { + "gain": 10, + "offset": -1 }, - 'GSICS': { - 'gain': 20, - 'offset': -2 + "GSICS": { + "gain": 20, + "offset": -2 }, - 'EXTERNAL': ext_coefs or {} + "EXTERNAL": ext_coefs or {} }, - 'radiance_type': 1 + "radiance_type": 1 }, calib_mode=calib_mode, scan_time=None @@ -182,16 +182,16 @@ def _get_calibration_handler(self, calib_mode='NOMINAL', ext_coefs=None): def test_calibrate_exceptions(self): """Test exceptions raised by the calibration handler.""" calib = self._get_calibration_handler() - with pytest.raises(ValueError): - calib.calibrate(None, 'invalid') + with pytest.raises(ValueError, match="Invalid calibration invalid for channel IR_108"): + calib.calibrate(None, "invalid") @pytest.mark.parametrize( - ('calib_mode', 'ext_coefs', 'expected'), + ("calib_mode", "ext_coefs", "expected"), [ - ('NOMINAL', {}, (10, -1)), - ('GSICS', {}, (20, -40)), - ('GSICS', {'gain': 30, 'offset': -3}, (30, -3)), - ('NOMINAL', {'gain': 30, 'offset': -3}, (30, -3)) + ("NOMINAL", {}, (10, -1)), + ("GSICS", {}, (20, -40)), + ("GSICS", {"gain": 30, "offset": -3}, (30, -3)), + ("NOMINAL", {"gain": 30, "offset": -3}, (30, -3)) ] ) def test_get_gain_offset(self, calib_mode, ext_coefs, expected): @@ -214,145 +214,145 @@ class TestFileHandlerCalibrationBase: radiance_types = 2 * np.ones(12) scan_time = datetime(2020, 1, 1) external_coefs = { - 'VIS006': {'gain': 10, 'offset': -10}, - 'IR_108': {'gain': 20, 'offset': -20}, - 'HRV': {'gain': 5, 'offset': -5} + "VIS006": {"gain": 10, "offset": -10}, + "IR_108": {"gain": 20, "offset": -20}, + "HRV": {"gain": 5, "offset": -5} } - spectral_channel_ids = {'VIS006': 1, 'IR_108': 9, 'HRV': 12} + spectral_channel_ids = {"VIS006": 1, "IR_108": 9, "HRV": 12} expected = { - 'VIS006': { - 'counts': { - 'NOMINAL': xr.DataArray( + "VIS006": { + "counts": { + "NOMINAL": xr.DataArray( [[0, 10], [100, 255]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'radiance': { - 'NOMINAL': xr.DataArray( + "radiance": { + "NOMINAL": xr.DataArray( [[np.nan, 9], [99, 254]], - dims=('y', 'x') + dims=("y", "x") ), - 'GSICS': xr.DataArray( 
+ "GSICS": xr.DataArray( [[np.nan, 9], [99, 254]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 90], [990, 2540]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'reflectance': { - 'NOMINAL': xr.DataArray( + "reflectance": { + "NOMINAL": xr.DataArray( [[np.nan, 41.88985], [460.7884, 1182.2247]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 418.89853], [4607.8843, 11822.249]], - dims=('y', 'x') + dims=("y", "x") ) } }, - 'IR_108': { - 'counts': { - 'NOMINAL': xr.DataArray( + "IR_108": { + "counts": { + "NOMINAL": xr.DataArray( [[0, 10], [100, 255]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'radiance': { - 'NOMINAL': xr.DataArray( + "radiance": { + "NOMINAL": xr.DataArray( [[np.nan, 81], [891, 2286]], - dims=('y', 'x') + dims=("y", "x") ), - 'GSICS': xr.DataArray( + "GSICS": xr.DataArray( [[np.nan, 8.19], [89.19, 228.69]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 180], [1980, 5080]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'brightness_temperature': { - 'NOMINAL': xr.DataArray( + "brightness_temperature": { + "NOMINAL": xr.DataArray( [[np.nan, 279.82318], [543.2585, 812.77167]], - dims=('y', 'x') + dims=("y", "x") ), - 'GSICS': xr.DataArray( + "GSICS": xr.DataArray( [[np.nan, 189.20985], [285.53293, 356.06668]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 335.14236], [758.6249, 1262.7567]], - dims=('y', 'x') + dims=("y", "x") ), } }, - 'HRV': { - 'counts': { - 'NOMINAL': xr.DataArray( + "HRV": { + "counts": { + "NOMINAL": xr.DataArray( [[0, 10], [100, 255]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'radiance': { - 'NOMINAL': xr.DataArray( + "radiance": { + "NOMINAL": xr.DataArray( [[np.nan, 108], [1188, 3048]], - dims=('y', 'x') + dims=("y", "x") ), - 'GSICS': xr.DataArray( + "GSICS": xr.DataArray( [[np.nan, 108], [1188, 3048]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 45], [495, 1270]], - dims=('y', 'x') + dims=("y", "x") ) }, - 'reflectance': { - 'NOMINAL': xr.DataArray( + "reflectance": { + "NOMINAL": xr.DataArray( [[np.nan, 415.26767], [4567.944, 11719.775]], - dims=('y', 'x') + dims=("y", "x") ), - 'EXTERNAL': xr.DataArray( + "EXTERNAL": xr.DataArray( [[np.nan, 173.02817], [1903.31, 4883.2397]], - dims=('y', 'x') + dims=("y", "x") ) } } } - @pytest.fixture(name='counts') + @pytest.fixture(name="counts") def counts(self): """Provide fake image counts.""" return xr.DataArray( [[0, 10], [100, 255]], - dims=('y', 'x') + dims=("y", "x") ) def _get_expected( self, channel, calibration, calib_mode, use_ext_coefs ): if use_ext_coefs: - return self.expected[channel][calibration]['EXTERNAL'] + return self.expected[channel][calibration]["EXTERNAL"] return self.expected[channel][calibration][calib_mode] diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py index 90785ffdbf..0ce40d8dfc 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py @@ -54,89 +54,88 @@ def setUp(self): ncols=5568, ) self.reader.mda.update({ - 'segment_sequence_number': 18, - 'planned_start_segment_number': 1 + "segment_sequence_number": 18, + "planned_start_segment_number": 1 }) self.reader.fill_hrv = True - @mock.patch('satpy.readers.hrit_base.np.memmap') + 
@mock.patch("satpy.readers.hrit_base.np.memmap") def test_read_hrv_band(self, memmap): """Test reading the hrv band.""" - nbits = self.reader.mda['number_of_bits_per_pixel'] + nbits = self.reader.mda["number_of_bits_per_pixel"] memmap.return_value = np.random.randint(0, 256, size=int((464 * 5568 * nbits) / 8), dtype=np.uint8) - res = self.reader.read_band('HRV', None) - self.assertEqual(res.shape, (464, 5568)) + res = self.reader.read_band("HRV", None) + assert res.shape == (464, 5568) - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset(self, calibrate, parent_get_dataset): """Test getting the hrv dataset.""" - key = make_dataid(name='HRV', calibration='reflectance') + key = make_dataid(name="HRV", calibration="reflectance") info = setup.get_fake_dataset_info() parent_get_dataset.return_value = mock.MagicMock() - calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=('y', 'x')) + calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=("y", "x")) res = self.reader.get_dataset(key, info) - self.assertEqual(res.shape, (464, 11136)) + assert res.shape == (464, 11136) # Test method calls parent_get_dataset.assert_called_with(key, info) - calibrate.assert_called_with(parent_get_dataset(), key['calibration']) + calibrate.assert_called_with(parent_get_dataset(), key["calibration"]) self.assert_attrs_equal(res.attrs, setup.get_attrs_exp()) np.testing.assert_equal( - res['acq_time'], + res["acq_time"], setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset_non_fill(self, calibrate, parent_get_dataset): """Test getting a non-filled hrv dataset.""" - key = make_dataid(name='HRV', calibration='reflectance') - key.name = 'HRV' + key = make_dataid(name="HRV", calibration="reflectance") + key.name = "HRV" info = setup.get_fake_dataset_info() self.reader.fill_hrv = False parent_get_dataset.return_value = mock.MagicMock() - calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=('y', 'x')) + calibrate.return_value = xr.DataArray(data=np.zeros((464, 5568)), dims=("y", "x")) res = self.reader.get_dataset(key, info) - self.assertEqual(res.shape, (464, 5568)) + assert res.shape == (464, 5568) # Test method calls parent_get_dataset.assert_called_with(key, info) - calibrate.assert_called_with(parent_get_dataset(), key['calibration']) + calibrate.assert_called_with(parent_get_dataset(), key["calibration"]) self.assert_attrs_equal(res.attrs, setup.get_attrs_exp()) np.testing.assert_equal( - res['acq_time'], + res["acq_time"], setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) def test_get_area_def(self): """Test getting the area def.""" from pyresample.utils import proj4_radius_parameters - area = self.reader.get_area_def(make_dataid(name='HRV', resolution=1000)) - self.assertEqual(area.area_extent, - (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356)) + area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) + 
assert area.area_extent == (-45561979844414.07, -3720765401003.719, 45602912357076.38, 77771774058.38356) proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) - self.assertEqual(a, 6378169.0) - self.assertAlmostEqual(b, 6356583.8) - self.assertEqual(proj_dict['h'], 35785831.0) - self.assertEqual(proj_dict['lon_0'], 0.0) - self.assertEqual(proj_dict['proj'], 'geos') - self.assertEqual(proj_dict['units'], 'm') + assert a == 6378169.0 + assert b == pytest.approx(6356583.8) + assert proj_dict["h"] == 35785831.0 + assert proj_dict["lon_0"] == 0.0 + assert proj_dict["proj"] == "geos" + assert proj_dict["units"] == "m" self.reader.fill_hrv = False - area = self.reader.get_area_def(make_dataid(name='HRV', resolution=1000)) + area = self.reader.get_area_def(make_dataid(name="HRV", resolution=1000)) npt.assert_allclose(area.defs[0].area_extent, (-22017598561055.01, -2926674655354.9604, 23564847539690.22, 77771774058.38356)) npt.assert_allclose(area.defs[1].area_extent, (-30793529275853.656, -3720765401003.719, 14788916824891.568, -2926674655354.9604)) - self.assertEqual(area.defs[0].area_id, 'msg_seviri_fes_1km') - self.assertEqual(area.defs[1].area_id, 'msg_seviri_fes_1km') + assert area.defs[0].area_id == "msg_seviri_fes_1km" + assert area.defs[1].area_id == "msg_seviri_fes_1km" class TestHRITMSGFileHandler(TestHRITMSGBase): @@ -155,70 +154,66 @@ def setUp(self): projection_longitude=self.projection_longitude ) self.reader.mda.update({ - 'segment_sequence_number': 18, - 'planned_start_segment_number': 1 + "segment_sequence_number": 18, + "planned_start_segment_number": 1 }) def _get_fake_data(self): return xr.DataArray( data=np.zeros((self.nlines, self.ncols)), - dims=('y', 'x') + dims=("y", "x") ) def test_get_area_def(self): """Test getting the area def.""" from pyresample.utils import proj4_radius_parameters - area = self.reader.get_area_def(make_dataid(name='VIS006', resolution=3000)) + area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) proj_dict = area.proj_dict a, b = proj4_radius_parameters(proj_dict) - self.assertEqual(a, 6378169.0) - self.assertAlmostEqual(b, 6356583.8) - self.assertEqual(proj_dict['h'], 35785831.0) - self.assertEqual(proj_dict['lon_0'], self.projection_longitude) - self.assertEqual(proj_dict['proj'], 'geos') - self.assertEqual(proj_dict['units'], 'm') - self.assertEqual(area.area_extent, - (-77771774058.38356, -3720765401003.719, - 30310525626438.438, 77771774058.38356)) + assert a == 6378169.0 + assert b == pytest.approx(6356583.8) + assert proj_dict["h"] == 35785831.0 + assert proj_dict["lon_0"] == self.projection_longitude + assert proj_dict["proj"] == "geos" + assert proj_dict["units"] == "m" + assert area.area_extent == (-77771774058.38356, -3720765401003.719, 30310525626438.438, 77771774058.38356) # Data shifted by 1.5km to N-W - self.reader.mda['offset_corrected'] = False - area = self.reader.get_area_def(make_dataid(name='VIS006', resolution=3000)) - self.assertEqual(area.area_extent, - (-77771772558.38356, -3720765402503.719, - 30310525627938.438, 77771772558.38356)) + self.reader.mda["offset_corrected"] = False + area = self.reader.get_area_def(make_dataid(name="VIS006", resolution=3000)) + assert area.area_extent == (-77771772558.38356, -3720765402503.719, 30310525627938.438, 77771772558.38356) - self.assertEqual(area.area_id, 'msg_seviri_rss_3km') + assert area.area_id == "msg_seviri_rss_3km" - @mock.patch('satpy.readers.hrit_base.np.memmap') + @mock.patch("satpy.readers.hrit_base.np.memmap") def test_read_band(self, 
memmap): """Test reading a band.""" - nbits = self.reader.mda['number_of_bits_per_pixel'] + nbits = self.reader.mda["number_of_bits_per_pixel"] memmap.return_value = np.random.randint(0, 256, size=int((464 * 3712 * nbits) / 8), dtype=np.uint8) - res = self.reader.read_band('VIS006', None) - self.assertEqual(res.shape, (464, 3712)) + res = self.reader.read_band("VIS006", None) + assert res.shape == (464, 3712) - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset(self, calibrate, parent_get_dataset): """Test getting the dataset.""" data = self._get_fake_data() parent_get_dataset.return_value = mock.MagicMock() calibrate.return_value = data - key = make_dataid(name='VIS006', calibration='reflectance') + key = make_dataid(name="VIS006", calibration="reflectance") info = setup.get_fake_dataset_info() res = self.reader.get_dataset(key, info) # Test method calls - new_data = np.zeros_like(data.data).astype('float32') + new_data = np.zeros_like(data.data).astype("float32") new_data[:, :] = np.nan expected = data.copy(data=new_data) - expected['acq_time'] = ( - 'y', + expected["acq_time"] = ( + "y", setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) xr.testing.assert_equal(res, expected) @@ -227,36 +222,36 @@ def test_get_dataset(self, calibrate, parent_get_dataset): setup.get_attrs_exp(self.projection_longitude) ) # testing start/end time - self.assertEqual(datetime(2006, 1, 1, 12, 15, 9, 304888), self.reader.observation_start_time) - self.assertEqual(datetime(2006, 1, 1, 12, 15,), self.reader.start_time) - self.assertEqual(self.reader.start_time, self.reader.nominal_start_time) + assert datetime(2006, 1, 1, 12, 15, 9, 304888) == self.reader.observation_start_time + assert datetime(2006, 1, 1, 12, 15) == self.reader.start_time + assert self.reader.start_time == self.reader.nominal_start_time - self.assertEqual(datetime(2006, 1, 1, 12, 27, 39), self.reader.observation_end_time) - self.assertEqual(self.reader.end_time, self.reader.nominal_end_time) - self.assertEqual(datetime(2006, 1, 1, 12, 30,), self.reader.end_time) + assert datetime(2006, 1, 1, 12, 27, 39) == self.reader.observation_end_time + assert self.reader.end_time == self.reader.nominal_end_time + assert datetime(2006, 1, 1, 12, 30) == self.reader.end_time # test repeat cycle duration - self.assertEqual(15, self.reader._repeat_cycle_duration) + assert 15 == self.reader._repeat_cycle_duration # Change the reducescan scenario to test the repeat cycle duration handling - self.reader.epilogue['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] = 1 - self.assertEqual(5, self.reader._repeat_cycle_duration) + self.reader.epilogue["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] = 1 + assert 5 == self.reader._repeat_cycle_duration - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset_without_masking_bad_scan_lines(self, calibrate, parent_get_dataset): """Test getting the dataset.""" data = self._get_fake_data() parent_get_dataset.return_value = mock.MagicMock() 
calibrate.return_value = data - key = make_dataid(name='VIS006', calibration='reflectance') + key = make_dataid(name="VIS006", calibration="reflectance") info = setup.get_fake_dataset_info() self.reader.mask_bad_quality_scan_lines = False res = self.reader.get_dataset(key, info) # Test method calls expected = data.copy() - expected['acq_time'] = ( - 'y', + expected["acq_time"] = ( + "y", setup.get_acq_time_exp(self.observation_start_time, self.nlines) ) xr.testing.assert_equal(res, expected) @@ -265,27 +260,27 @@ def test_get_dataset_without_masking_bad_scan_lines(self, calibrate, parent_get_ setup.get_attrs_exp(self.projection_longitude) ) - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate') + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate") def test_get_dataset_with_raw_metadata(self, calibrate, parent_get_dataset): """Test getting the dataset.""" calibrate.return_value = self._get_fake_data() - key = make_dataid(name='VIS006', calibration='reflectance') + key = make_dataid(name="VIS006", calibration="reflectance") info = setup.get_fake_dataset_info() self.reader.include_raw_metadata = True res = self.reader.get_dataset(key, info) - assert 'raw_metadata' in res.attrs + assert "raw_metadata" in res.attrs def test_get_raw_mda(self): """Test provision of raw metadata.""" - self.reader.mda = {'segment': 1, 'loff': 123} - self.reader.prologue_.reduce = lambda max_size: {'prologue': 1} - self.reader.epilogue_.reduce = lambda max_size: {'epilogue': 1} - expected = {'prologue': 1, 'epilogue': 1, 'segment': 1} - self.assertDictEqual(self.reader._get_raw_mda(), expected) + self.reader.mda = {"segment": 1, "loff": 123} + self.reader.prologue_.reduce = lambda max_size: {"prologue": 1} + self.reader.epilogue_.reduce = lambda max_size: {"epilogue": 1} + expected = {"prologue": 1, "epilogue": 1, "segment": 1} + assert self.reader._get_raw_mda() == expected # Make sure _get_raw_mda() doesn't modify the original dictionary - self.assertIn('loff', self.reader.mda) + assert "loff" in self.reader.mda def test_satpos_no_valid_orbit_polynomial(self): """Test satellite position if there is no valid orbit polynomial.""" @@ -296,10 +291,7 @@ def test_satpos_no_valid_orbit_polynomial(self): projection_longitude=self.projection_longitude, orbit_polynomials=ORBIT_POLYNOMIALS_INVALID ) - self.assertNotIn( - 'satellite_actual_longitude', - reader.mda['orbital_parameters'] - ) + assert "satellite_actual_longitude" not in reader.mda["orbital_parameters"] class TestHRITMSGPrologueFileHandler(unittest.TestCase): @@ -314,8 +306,8 @@ def setUp(self, *mocks): ) self.reader = fh.prologue_ - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue') - @mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', autospec=True) + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue") + @mock.patch("satpy.readers.hrit_base.HRITFileHandler.__init__", autospec=True) def test_extra_kwargs(self, init, *mocks): """Test whether the prologue file handler accepts extra keyword arguments.""" @@ -324,31 +316,31 @@ def init_patched(self, *args, **kwargs): init.side_effect = init_patched - HRITMSGPrologueFileHandler(filename='dummy_prologue_filename', - filename_info={'service': ''}, + HRITMSGPrologueFileHandler(filename="dummy_prologue_filename", + filename_info={"service": ""}, 
filetype_info=None, ext_calib_coefs={}, mda_max_array_size=123, - calib_mode='nominal') + calib_mode="nominal") - @mock.patch('satpy.readers.seviri_l1b_hrit.utils.reduce_mda') + @mock.patch("satpy.readers.seviri_l1b_hrit.utils.reduce_mda") def test_reduce(self, reduce_mda): """Test metadata reduction.""" - reduce_mda.return_value = 'reduced' + reduce_mda.return_value = "reduced" # Set buffer - self.assertEqual(self.reader.reduce(123), 'reduced') + assert self.reader.reduce(123) == "reduced" # Read buffer - self.assertEqual(self.reader.reduce(123), 'reduced') + assert self.reader.reduce(123) == "reduced" reduce_mda.assert_called_once() class TestHRITMSGEpilogueFileHandler(unittest.TestCase): """Test the HRIT epilogue file handler.""" - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue') - @mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', autospec=True) + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue") + @mock.patch("satpy.readers.hrit_base.HRITFileHandler.__init__", autospec=True) def setUp(self, init, *mocks): """Set up the test case.""" @@ -357,13 +349,13 @@ def init_patched(self, *args, **kwargs): init.side_effect = init_patched - self.reader = HRITMSGEpilogueFileHandler(filename='dummy_epilogue_filename', - filename_info={'service': ''}, + self.reader = HRITMSGEpilogueFileHandler(filename="dummy_epilogue_filename", + filename_info={"service": ""}, filetype_info=None, - calib_mode='nominal') + calib_mode="nominal") - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue') - @mock.patch('satpy.readers.hrit_base.HRITFileHandler.__init__', autospec=True) + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue") + @mock.patch("satpy.readers.hrit_base.HRITFileHandler.__init__", autospec=True) def test_extra_kwargs(self, init, *mocks): """Test whether the epilogue file handler accepts extra keyword arguments.""" @@ -372,74 +364,74 @@ def init_patched(self, *args, **kwargs): init.side_effect = init_patched - HRITMSGEpilogueFileHandler(filename='dummy_epilogue_filename', - filename_info={'service': ''}, + HRITMSGEpilogueFileHandler(filename="dummy_epilogue_filename", + filename_info={"service": ""}, filetype_info=None, ext_calib_coefs={}, mda_max_array_size=123, - calib_mode='nominal') + calib_mode="nominal") - @mock.patch('satpy.readers.seviri_l1b_hrit.utils.reduce_mda') + @mock.patch("satpy.readers.seviri_l1b_hrit.utils.reduce_mda") def test_reduce(self, reduce_mda): """Test metadata reduction.""" - reduce_mda.return_value = 'reduced' + reduce_mda.return_value = "reduced" # Set buffer - self.assertEqual(self.reader.reduce(123), 'reduced') + assert self.reader.reduce(123) == "reduced" reduce_mda.assert_called() # Read buffer reduce_mda.reset_mock() - self.reader._reduced = 'red' - self.assertEqual(self.reader.reduce(123), 'red') + self.reader._reduced = "red" + assert self.reader.reduce(123) == "red" reduce_mda.assert_not_called() class TestHRITMSGCalibration(TestFileHandlerCalibrationBase): """Unit tests for calibration.""" - @pytest.fixture(name='file_handler') + @pytest.fixture(name="file_handler") def file_handler(self): """Create a mocked file handler.""" prolog = { - 'RadiometricProcessing': { - 'Level15ImageCalibration': { - 'CalSlope': self.gains_nominal, - 'CalOffset': self.offsets_nominal, + "RadiometricProcessing": { + "Level15ImageCalibration": { + "CalSlope": self.gains_nominal, + "CalOffset": self.offsets_nominal, }, - 'MPEFCalFeedback': 
{ - 'GSICSCalCoeff': self.gains_gsics, - 'GSICSOffsetCount': self.offsets_gsics, + "MPEFCalFeedback": { + "GSICSCalCoeff": self.gains_gsics, + "GSICSOffsetCount": self.offsets_gsics, } }, - 'ImageDescription': { - 'Level15ImageProduction': { - 'PlannedChanProcessing': self.radiance_types + "ImageDescription": { + "Level15ImageProduction": { + "PlannedChanProcessing": self.radiance_types } }, - 'ImageAcquisition': { - 'PlannedAcquisitionTime': { - 'TrueRepeatCycleStart': self.scan_time, + "ImageAcquisition": { + "PlannedAcquisitionTime": { + "TrueRepeatCycleStart": self.scan_time, } } } epilog = { - 'ImageProductionStats': { - 'ActualScanningSummary': { - 'ForwardScanStart': self.scan_time + "ImageProductionStats": { + "ActualScanningSummary": { + "ForwardScanStart": self.scan_time } } } mda = { - 'image_segment_line_quality': { - 'line_validity': np.array([3, 3]), - 'line_radiometric_quality': np.array([4, 4]), - 'line_geometric_quality': np.array([4, 4]) + "image_segment_line_quality": { + "line_validity": np.array([3, 3]), + "line_radiometric_quality": np.array([4, 4]), + "line_geometric_quality": np.array([4, 4]) }, } with mock.patch( - 'satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.__init__', + "satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.__init__", return_value=None ): fh = HRITMSGFileHandler() @@ -450,33 +442,33 @@ def file_handler(self): return fh @pytest.mark.parametrize( - ('channel', 'calibration', 'calib_mode', 'use_ext_coefs'), + ("channel", "calibration", "calib_mode", "use_ext_coefs"), [ # VIS channel, internal coefficients - ('VIS006', 'counts', 'NOMINAL', False), - ('VIS006', 'radiance', 'NOMINAL', False), - ('VIS006', 'radiance', 'GSICS', False), - ('VIS006', 'reflectance', 'NOMINAL', False), + ("VIS006", "counts", "NOMINAL", False), + ("VIS006", "radiance", "NOMINAL", False), + ("VIS006", "radiance", "GSICS", False), + ("VIS006", "reflectance", "NOMINAL", False), # VIS channel, external coefficients (mode should have no effect) - ('VIS006', 'radiance', 'GSICS', True), - ('VIS006', 'reflectance', 'NOMINAL', True), + ("VIS006", "radiance", "GSICS", True), + ("VIS006", "reflectance", "NOMINAL", True), # IR channel, internal coefficients - ('IR_108', 'counts', 'NOMINAL', False), - ('IR_108', 'radiance', 'NOMINAL', False), - ('IR_108', 'radiance', 'GSICS', False), - ('IR_108', 'brightness_temperature', 'NOMINAL', False), - ('IR_108', 'brightness_temperature', 'GSICS', False), + ("IR_108", "counts", "NOMINAL", False), + ("IR_108", "radiance", "NOMINAL", False), + ("IR_108", "radiance", "GSICS", False), + ("IR_108", "brightness_temperature", "NOMINAL", False), + ("IR_108", "brightness_temperature", "GSICS", False), # IR channel, external coefficients (mode should have no effect) - ('IR_108', 'radiance', 'NOMINAL', True), - ('IR_108', 'brightness_temperature', 'GSICS', True), + ("IR_108", "radiance", "NOMINAL", True), + ("IR_108", "brightness_temperature", "GSICS", True), # HRV channel, internal coefficiens - ('HRV', 'counts', 'NOMINAL', False), - ('HRV', 'radiance', 'NOMINAL', False), - ('HRV', 'radiance', 'GSICS', False), - ('HRV', 'reflectance', 'NOMINAL', False), + ("HRV", "counts", "NOMINAL", False), + ("HRV", "radiance", "NOMINAL", False), + ("HRV", "radiance", "GSICS", False), + ("HRV", "reflectance", "NOMINAL", False), # HRV channel, external coefficients (mode should have no effect) - ('HRV', 'radiance', 'GSICS', True), - ('HRV', 'reflectance', 'NOMINAL', True), + ("HRV", "radiance", "GSICS", True), + ("HRV", "reflectance", "NOMINAL", True), ] ) def 
test_calibrate( @@ -493,7 +485,7 @@ def test_calibrate( ) fh = file_handler - fh.mda['spectral_channel_id'] = self.spectral_channel_ids[channel] + fh.mda["spectral_channel_id"] = self.spectral_channel_ids[channel] fh.channel_name = channel fh.calib_mode = calib_mode fh.ext_calib_coefs = external_coefs @@ -502,18 +494,18 @@ def test_calibrate( def test_mask_bad_quality(self, file_handler): """Test the masking of bad quality scan lines.""" - channel = 'VIS006' + channel = "VIS006" expected = self._get_expected( channel=channel, - calibration='radiance', - calib_mode='NOMINAL', + calibration="radiance", + calib_mode="NOMINAL", use_ext_coefs=False ) fh = file_handler res = fh._mask_bad_quality(expected) - new_data = np.zeros_like(expected.data).astype('float32') + new_data = np.zeros_like(expected.data).astype("float32") new_data[:, :] = np.nan expected = expected.copy(data=new_data) xr.testing.assert_equal(res, expected) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py index 841d45b943..b9ff1f95ea 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py @@ -28,15 +28,15 @@ def new_get_hd(instance, hdr_info): """Generate some metadata.""" - instance.mda = {'spectral_channel_id': 1} - instance.mda.setdefault('number_of_bits_per_pixel', 10) + instance.mda = {"spectral_channel_id": 1} + instance.mda.setdefault("number_of_bits_per_pixel", 10) - instance.mda['projection_parameters'] = {'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, - 'SSP_longitude': 0.0} - instance.mda['orbital_parameters'] = {} - instance.mda['total_header_length'] = 12 + instance.mda["projection_parameters"] = {"a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, + "SSP_longitude": 0.0} + instance.mda["orbital_parameters"] = {} + instance.mda["total_header_length"] = 12 def get_new_read_prologue(prologue): @@ -55,18 +55,18 @@ def get_fake_file_handler(observation_start_time, nlines, ncols, projection_long epilogue = get_fake_epilogue() m = mock.mock_open() - with mock.patch('satpy.readers.seviri_l1b_hrit.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.hrit_base.open', m, create=True) as newopen, \ - mock.patch('satpy.readers.utils.open', m, create=True) as utilopen, \ - mock.patch('satpy.readers.seviri_l1b_hrit.CHANNEL_NAMES'), \ - mock.patch.object(HRITMSGFileHandler, '_get_hd', new=new_get_hd), \ - mock.patch.object(HRITMSGPrologueFileHandler, 'read_prologue', + with mock.patch("satpy.readers.seviri_l1b_hrit.np.fromfile") as fromfile, \ + mock.patch("satpy.readers.hrit_base.open", m, create=True) as newopen, \ + mock.patch("satpy.readers.utils.open", m, create=True) as utilopen, \ + mock.patch("satpy.readers.seviri_l1b_hrit.CHANNEL_NAMES"), \ + mock.patch.object(HRITMSGFileHandler, "_get_hd", new=new_get_hd), \ + mock.patch.object(HRITMSGPrologueFileHandler, "read_prologue", new=get_new_read_prologue(prologue)): fromfile.return_value = np.array( [(1, 2)], - dtype=[('total_header_length', int), - ('hdr_id', int)] + dtype=[("total_header_length", int), + ("hdr_id", int)] ) newopen.return_value.__enter__.return_value.tell.return_value = 1 # The size of the return value hereafter was chosen arbitrarily with the expectation @@ -74,16 +74,16 @@ def get_fake_file_handler(observation_start_time, nlines, ncols, projection_long # files. 
utilopen.return_value.__enter__.return_value.read.return_value = bytes([0]*8192) prologue = HRITMSGPrologueFileHandler( - filename='dummy_prologue_filename', + filename="dummy_prologue_filename", filename_info=filename_info, filetype_info={} ) epilogue = mock.MagicMock(epilogue=epilogue) reader = HRITMSGFileHandler( - 'filename', + "filename", filename_info, - {'filetype': 'info'}, + {"filetype": "info"}, prologue, epilogue ) @@ -99,30 +99,30 @@ def get_fake_prologue(projection_longitude, orbit_polynomials): "SatelliteId": 324, "NominalLongitude": -3.5 }, - 'Orbit': { - 'OrbitPolynomial': orbit_polynomials, + "Orbit": { + "OrbitPolynomial": orbit_polynomials, } }, - 'GeometricProcessing': { - 'EarthModel': { - 'TypeOfEarthModel': 2, - 'EquatorialRadius': 6378.169, - 'NorthPolarRadius': 6356.5838, - 'SouthPolarRadius': 6356.5838 + "GeometricProcessing": { + "EarthModel": { + "TypeOfEarthModel": 2, + "EquatorialRadius": 6378.169, + "NorthPolarRadius": 6356.5838, + "SouthPolarRadius": 6356.5838 } }, - 'ImageDescription': { - 'ProjectionDescription': { - 'LongitudeOfSSP': projection_longitude + "ImageDescription": { + "ProjectionDescription": { + "LongitudeOfSSP": projection_longitude }, - 'Level15ImageProduction': { - 'ImageProcDirection': 1 + "Level15ImageProduction": { + "ImageProcDirection": 1 } }, - 'ImageAcquisition': { - 'PlannedAcquisitionTime': { - 'TrueRepeatCycleStart': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'PlannedRepeatCycleEnd': datetime(2006, 1, 1, 12, 30, 0, 0) + "ImageAcquisition": { + "PlannedAcquisitionTime": { + "TrueRepeatCycleStart": datetime(2006, 1, 1, 12, 15, 9, 304888), + "PlannedRepeatCycleEnd": datetime(2006, 1, 1, 12, 30, 0, 0) } } } @@ -131,21 +131,21 @@ def get_fake_prologue(projection_longitude, orbit_polynomials): def get_fake_epilogue(): """Create a fake HRIT epilogue.""" return { - 'ImageProductionStats': { - 'ActualL15CoverageHRV': { - 'LowerSouthLineActual': 1, - 'LowerNorthLineActual': 8256, - 'LowerEastColumnActual': 2877, - 'LowerWestColumnActual': 8444, - 'UpperSouthLineActual': 8257, - 'UpperNorthLineActual': 11136, - 'UpperEastColumnActual': 1805, - 'UpperWestColumnActual': 7372 + "ImageProductionStats": { + "ActualL15CoverageHRV": { + "LowerSouthLineActual": 1, + "LowerNorthLineActual": 8256, + "LowerEastColumnActual": 2877, + "LowerWestColumnActual": 8444, + "UpperSouthLineActual": 8257, + "UpperNorthLineActual": 11136, + "UpperEastColumnActual": 1805, + "UpperWestColumnActual": 7372 }, - 'ActualScanningSummary': { - 'ReducedScan': 0, - 'ForwardScanStart': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'ForwardScanEnd': datetime(2006, 1, 1, 12, 27, 39, 0) + "ActualScanningSummary": { + "ReducedScan": 0, + "ForwardScanStart": datetime(2006, 1, 1, 12, 15, 9, 304888), + "ForwardScanEnd": datetime(2006, 1, 1, 12, 27, 39, 0) } } } @@ -156,19 +156,19 @@ def get_fake_mda(nlines, ncols, start_time): nbits = 10 tline = get_acq_time_cds(start_time, nlines) return { - 'number_of_bits_per_pixel': nbits, - 'number_of_lines': nlines, - 'number_of_columns': ncols, - 'data_field_length': nlines * ncols * nbits, - 'cfac': 5, - 'lfac': 5, - 'coff': 10, - 'loff': 10, - 'image_segment_line_quality': { - 'line_mean_acquisition': tline, - 'line_validity': np.full(nlines, 3), - 'line_radiometric_quality': np.full(nlines, 4), - 'line_geometric_quality': np.full(nlines, 4) + "number_of_bits_per_pixel": nbits, + "number_of_lines": nlines, + "number_of_columns": ncols, + "data_field_length": nlines * ncols * nbits, + "cfac": 5, + "lfac": 5, + "coff": 10, + "loff": 10, + 
"image_segment_line_quality": { + "line_mean_acquisition": tline, + "line_validity": np.full(nlines, 3), + "line_radiometric_quality": np.full(nlines, 4), + "line_geometric_quality": np.full(nlines, 4) } } @@ -176,18 +176,18 @@ def get_fake_mda(nlines, ncols, start_time): def get_fake_filename_info(start_time): """Create fake filename information.""" return { - 'platform_shortname': 'MSG3', - 'start_time': start_time, - 'service': 'MSG' + "platform_shortname": "MSG3", + "start_time": start_time, + "service": "MSG" } def get_fake_dataset_info(): """Create fake dataset info.""" return { - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name' + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name" } @@ -196,47 +196,47 @@ def get_acq_time_cds(start_time, nlines): days_since_1958 = (start_time - datetime(1958, 1, 1)).days tline = np.zeros( nlines, - dtype=[('days', '>u2'), ('milliseconds', '>u4')] + dtype=[("days", ">u2"), ("milliseconds", ">u4")] ) - tline['days'][1:-1] = days_since_1958 * np.ones(nlines - 2) + tline["days"][1:-1] = days_since_1958 * np.ones(nlines - 2) offset_second = (start_time - start_time.replace(hour=0, minute=0, second=0, microsecond=0)).total_seconds()*1000 - tline['milliseconds'][1:-1] = np.arange(nlines - 2)+offset_second + tline["milliseconds"][1:-1] = np.arange(nlines - 2)+offset_second return tline def get_acq_time_exp(start_time, nlines): """Get expected scanline acquisition times.""" - tline_exp = np.zeros(464, dtype='datetime64[ms]') - tline_exp[0] = np.datetime64('NaT') - tline_exp[-1] = np.datetime64('NaT') + tline_exp = np.zeros(464, dtype="datetime64[ms]") + tline_exp[0] = np.datetime64("NaT") + tline_exp[-1] = np.datetime64("NaT") tline_exp[1:-1] = np.datetime64(start_time) - tline_exp[1:-1] += np.arange(nlines - 2).astype('timedelta64[ms]') + tline_exp[1:-1] += np.arange(nlines - 2).astype("timedelta64[ms]") return tline_exp def get_attrs_exp(projection_longitude=0.0): """Get expected dataset attributes.""" return { - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name', - 'platform_name': 'Meteosat-11', - 'sensor': 'seviri', - 'orbital_parameters': {'projection_longitude': projection_longitude, - 'projection_latitude': 0., - 'projection_altitude': 35785831.0, - 'satellite_nominal_longitude': -3.5, - 'satellite_nominal_latitude': 0.0, - 'satellite_actual_longitude': -3.55117540817073, - 'satellite_actual_latitude': -0.5711243456528018, - 'satellite_actual_altitude': 35783296.150123544}, - 'georef_offset_corrected': True, - 'nominal_start_time': (datetime(2006, 1, 1, 12, 15),), - 'nominal_end_time': (datetime(2006, 1, 1, 12, 30),), - 'time_parameters': { - 'nominal_start_time': datetime(2006, 1, 1, 12, 15), - 'nominal_end_time': datetime(2006, 1, 1, 12, 30), - 'observation_start_time': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'observation_end_time': datetime(2006, 1, 1, 12, 27, 39, 0) + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name", + "platform_name": "Meteosat-11", + "sensor": "seviri", + "orbital_parameters": {"projection_longitude": projection_longitude, + "projection_latitude": 0., + "projection_altitude": 35785831.0, + "satellite_nominal_longitude": -3.5, + "satellite_nominal_latitude": 0.0, + "satellite_actual_longitude": -3.55117540817073, + "satellite_actual_latitude": -0.5711243456528018, + "satellite_actual_altitude": 35783296.150123544}, + "georef_offset_corrected": True, + "nominal_start_time": (datetime(2006, 1, 1, 12, 
15),), + "nominal_end_time": (datetime(2006, 1, 1, 12, 30),), + "time_parameters": { + "nominal_start_time": datetime(2006, 1, 1, 12, 15), + "nominal_end_time": datetime(2006, 1, 1, 12, 30), + "observation_start_time": datetime(2006, 1, 1, 12, 15, 9, 304888), + "observation_end_time": datetime(2006, 1, 1, 12, 27, 39, 0) } } diff --git a/satpy/tests/reader_tests/test_seviri_l1b_icare.py b/satpy/tests/reader_tests/test_seviri_l1b_icare.py index 5ca8ac1a2e..372611c87d 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_icare.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_icare.py @@ -38,56 +38,56 @@ class FakeHDF4FileHandler2(FakeHDF4FileHandler): def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} - file_content['/attr/Nadir_Pixel_Size'] = 3000. - file_content['/attr/Beginning_Acquisition_Date'] = "2004-12-29T12:15:00Z" - file_content['/attr/End_Acquisition_Date'] = "2004-12-29T12:27:44Z" - file_content['/attr/Geolocation'] = ('1.3642337E7', '1856.0', '1.3642337E7', '1856.0') - file_content['/attr/Altitude'] = '42164.0' - file_content['/attr/Geographic_Projection'] = 'geos' - file_content['/attr/Projection_Longitude'] = '0.0' - file_content['/attr/Sub_Satellite_Longitude'] = '3.4' - file_content['/attr/Sensors'] = 'MSG1/SEVIRI' - file_content['/attr/Zone'] = 'G' - file_content['/attr/_FillValue'] = 1 - file_content['/attr/scale_factor'] = 1. - file_content['/attr/add_offset'] = 0. + file_content["/attr/Nadir_Pixel_Size"] = 3000. + file_content["/attr/Beginning_Acquisition_Date"] = "2004-12-29T12:15:00Z" + file_content["/attr/End_Acquisition_Date"] = "2004-12-29T12:27:44Z" + file_content["/attr/Geolocation"] = ("1.3642337E7", "1856.0", "1.3642337E7", "1856.0") + file_content["/attr/Altitude"] = "42164.0" + file_content["/attr/Geographic_Projection"] = "geos" + file_content["/attr/Projection_Longitude"] = "0.0" + file_content["/attr/Sub_Satellite_Longitude"] = "3.4" + file_content["/attr/Sensors"] = "MSG1/SEVIRI" + file_content["/attr/Zone"] = "G" + file_content["/attr/_FillValue"] = 1 + file_content["/attr/scale_factor"] = 1. + file_content["/attr/add_offset"] = 0. # test one IR and one VIS channel - file_content['Normalized_Radiance'] = DEFAULT_FILE_DATA - file_content['Normalized_Radiance/attr/_FillValue'] = 1 - file_content['Normalized_Radiance/attr/scale_factor'] = 1. - file_content['Normalized_Radiance/attr/add_offset'] = 0. - file_content['Normalized_Radiance/shape'] = DEFAULT_FILE_SHAPE - - file_content['Brightness_Temperature'] = DEFAULT_FILE_DATA - file_content['Brightness_Temperature/attr/_FillValue'] = 1 - file_content['Brightness_Temperature/attr/scale_factor'] = 1. - file_content['Brightness_Temperature/attr/add_offset'] = 0. - file_content['Brightness_Temperature/shape'] = DEFAULT_FILE_SHAPE + file_content["Normalized_Radiance"] = DEFAULT_FILE_DATA + file_content["Normalized_Radiance/attr/_FillValue"] = 1 + file_content["Normalized_Radiance/attr/scale_factor"] = 1. + file_content["Normalized_Radiance/attr/add_offset"] = 0. + file_content["Normalized_Radiance/shape"] = DEFAULT_FILE_SHAPE + + file_content["Brightness_Temperature"] = DEFAULT_FILE_DATA + file_content["Brightness_Temperature/attr/_FillValue"] = 1 + file_content["Brightness_Temperature/attr/scale_factor"] = 1. + file_content["Brightness_Temperature/attr/add_offset"] = 0. 
+ file_content["Brightness_Temperature/shape"] = DEFAULT_FILE_SHAPE # convert to xarrays from xarray import DataArray for key, val in file_content.items(): if isinstance(val, np.ndarray): attrs = {} - for a in ['_FillValue', 'scale_factor', 'add_offset']: - if key + '/attr/' + a in file_content: - attrs[a] = file_content[key + '/attr/' + a] - file_content[key] = DataArray(da.from_array(val), dims=('x', 'y'), attrs=attrs) + for a in ["_FillValue", "scale_factor", "add_offset"]: + if key + "/attr/" + a in file_content: + attrs[a] = file_content[key + "/attr/" + a] + file_content[key] = DataArray(da.from_array(val), dims=("x", "y"), attrs=attrs) return file_content class TestSEVIRIICAREReader(unittest.TestCase): """Test SEVIRI L1b HDF4 from ICARE Reader.""" - yaml_file = 'seviri_l1b_icare.yaml' + yaml_file = "seviri_l1b_icare.yaml" def setUp(self): """Wrap HDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.seviri_l1b_icare import SEVIRI_ICARE - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(SEVIRI_ICARE, '__bases__', (FakeHDF4FileHandler2,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(SEVIRI_ICARE, "__bases__", (FakeHDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -97,78 +97,78 @@ def tearDown(self): def compare_areas(self, v): """Compare produced AreaDefinition with expected.""" - test_area = {'area_id': 'geosmsg', - 'width': 10, - 'height': 300, - 'area_extent': (-5567248.2834071, + test_area = {"area_id": "geosmsg", + "width": 10, + "height": 300, + "area_extent": (-5567248.2834071, -5570248.6866857, -5537244.2506213, -4670127.7031114)} - self.assertEqual(v.attrs['area'].area_id, test_area['area_id']) - self.assertEqual(v.attrs['area'].width, test_area['width']) - self.assertEqual(v.attrs['area'].height, test_area['height']) - np.testing.assert_almost_equal(v.attrs['area'].area_extent, - test_area['area_extent']) + assert v.attrs["area"].area_id == test_area["area_id"] + assert v.attrs["area"].width == test_area["width"] + assert v.attrs["area"].height == test_area["height"] + np.testing.assert_almost_equal(v.attrs["area"].area_extent, + test_area["area_extent"]) def test_init(self): """Test basic init with no extra parameters.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf', - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf' + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf", + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf" ]) - self.assertEqual(len(loadables), 2) + assert len(loadables) == 2 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset_vis(self): """Test loading all datasets from a full swath file.""" from datetime import datetime r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf' + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf" ]) r.create_filehandlers(loadables) - datasets = r.load(['VIS008']) - self.assertEqual(len(datasets), 1) + datasets = r.load(["VIS008"]) + assert len(datasets) == 1 for v in datasets.values(): dt = datetime(2004, 12, 29, 12, 27, 44) - self.assertEqual(v.attrs['end_time'], dt) - self.assertEqual(v.attrs['calibration'], 'reflectance') + assert v.attrs["end_time"] == dt + assert 
v.attrs["calibration"] == "reflectance" def test_load_dataset_ir(self): """Test loading all datasets from a full swath file.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf' + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_IR108_V1-04.hdf" ]) r.create_filehandlers(loadables) - datasets = r.load(['IR_108']) - self.assertEqual(len(datasets), 1) + datasets = r.load(["IR_108"]) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['calibration'], 'brightness_temperature') + assert v.attrs["calibration"] == "brightness_temperature" def test_area_def_lores(self): """Test loading all datasets from an area of interest file.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf' + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf" ]) r.create_filehandlers(loadables) - ds = r.load(['VIS008']) - self.compare_areas(ds['VIS008']) - self.assertEqual(ds['VIS008'].attrs['area'].proj_id, 'msg_lowres') + ds = r.load(["VIS008"]) + self.compare_areas(ds["VIS008"]) + assert ds["VIS008"].attrs["area"].proj_id == "msg_lowres" def test_area_def_hires(self): """Test loading all datasets from an area of interest file.""" r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_HRV_V1-04.hdf', + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_HRV_V1-04.hdf", ]) r.create_filehandlers(loadables) - ds = r.load(['HRV']) - self.compare_areas(ds['HRV']) - self.assertEqual(ds['HRV'].attrs['area'].proj_id, 'msg_hires') + ds = r.load(["HRV"]) + self.compare_areas(ds["HRV"]) + assert ds["HRV"].attrs["area"].proj_id == "msg_hires" def test_sensor_names(self): """Check satellite name conversion is correct, including error case.""" @@ -176,12 +176,12 @@ def test_sensor_names(self): mock.MagicMock(), mock.MagicMock(), mock.MagicMock()) - sensor_list = {'Meteosat-08': 'MSG1/SEVIRI', - 'Meteosat-09': 'MSG2/SEVIRI', - 'Meteosat-10': 'MSG3/SEVIRI', - 'Meteosat-11': 'MSG4/SEVIRI'} - with mock.patch('satpy.tests.reader_tests.test_seviri_l1b_icare.' - 'FakeHDF4FileHandler2.get_test_content') as patched_func: + sensor_list = {"Meteosat-08": "MSG1/SEVIRI", + "Meteosat-09": "MSG2/SEVIRI", + "Meteosat-10": "MSG3/SEVIRI", + "Meteosat-11": "MSG4/SEVIRI"} + with mock.patch("satpy.tests.reader_tests.test_seviri_l1b_icare." 
+ "FakeHDF4FileHandler2.get_test_content") as patched_func: def _run_target(): patched_func.return_value = file_data return self.p.target(mock.MagicMock(), @@ -189,12 +189,12 @@ def _run_target(): mock.MagicMock()).sensor_name for sat in sensor_list: - file_data['/attr/Sensors'] = sensor_list[sat] + file_data["/attr/Sensors"] = sensor_list[sat] plat, sens = _run_target() - self.assertEqual(plat, sat) + assert plat == sat with self.assertRaises(NameError): - file_data['/attr/Sensors'] = 'BADSAT/NOSENSE' + file_data["/attr/Sensors"] = "BADSAT/NOSENSE" plat, sens = _run_target() def test_bad_bandname(self): @@ -202,7 +202,7 @@ def test_bad_bandname(self): with self.assertRaises(NameError): self.p.target(mock.MagicMock(), mock.MagicMock(), - mock.MagicMock())._get_dsname({'name': 'badband'}) + mock.MagicMock())._get_dsname({"name": "badband"}) def test_nocompute(self): """Test that dask does not compute anything in the reader itself.""" @@ -212,7 +212,7 @@ def test_nocompute(self): with dask.config.set(scheduler=CustomScheduler(max_computes=0)): r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf' + "GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf" ]) r.create_filehandlers(loadables) - r.load(['VIS008']) + r.load(["VIS008"]) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index e344d09ff9..ba7cf63447 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -43,476 +43,476 @@ from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase from satpy.tests.utils import assert_attrs_equal, make_dataid -CHANNEL_INDEX_LIST = ['VIS006', 'VIS008', 'IR_016', 'IR_039', - 'WV_062', 'WV_073', 'IR_087', 'IR_097', - 'IR_108', 'IR_120', 'IR_134', 'HRV'] +CHANNEL_INDEX_LIST = ["VIS006", "VIS008", "IR_016", "IR_039", + "WV_062", "WV_073", "IR_087", "IR_097", + "IR_108", "IR_120", "IR_134", "HRV"] AVAILABLE_CHANNELS = {} for item in CHANNEL_INDEX_LIST: AVAILABLE_CHANNELS[item] = True -SEC15HDR = '15_SECONDARY_PRODUCT_HEADER' -IDS = 'SelectedBandIDs' +SEC15HDR = "15_SECONDARY_PRODUCT_HEADER" +IDS = "SelectedBandIDs" TEST1_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}} -TEST1_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XX--XX--XX--' +TEST1_HEADER_CHNLIST[SEC15HDR][IDS]["Value"] = "XX--XX--XX--" TEST2_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}} -TEST2_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XX-XXXX----X' +TEST2_HEADER_CHNLIST[SEC15HDR][IDS]["Value"] = "XX-XXXX----X" TEST3_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}} -TEST3_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XXXXXXXXXXXX' +TEST3_HEADER_CHNLIST[SEC15HDR][IDS]["Value"] = "XXXXXXXXXXXX" TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) + "earth_model": 1, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": True, + 
"is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 1392, - 'Area extent': (5568748.275756836, 5568748.275756836, -5568748.275756836, 1392187.068939209) + "earth_model": 1, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_rss_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 1392, + "Area extent": (5568748.275756836, 5568748.275756836, -5568748.275756836, 1392187.068939209) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN_FILL = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) + "earth_model": 1, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_rss_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 2516, - 'Number of rows': 1829, - 'Area extent': (5337717.232, 5154692.6389, -2211297.1332, -333044.7514) + "earth_model": 1, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": 
"crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 2516, + "Number of rows": 1829, + "Area extent": (5337717.232, 5154692.6389, -2211297.1332, -333044.7514) } } TEST_AREA_EXTENT_EARTHMODEL1_VISIR_ROI_FILL = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) + "earth_model": 1, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5568748.2758, 5568748.2758, -5568748.2758, -5568748.2758) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 5568, - 'Number of rows': 11136, - 'Area extent 0': (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525), - 'Area extent 1': (3602483.924627304, 5569748.188853264, -1966264.1298770905, 2625352.665781975) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": True, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 5568, + "Number of rows": 11136, + "Area extent 0": (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525), + "Area extent 1": (3602483.924627304, 5569748.188853264, -1966264.1298770905, 2625352.665781975) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK_FILL = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": True, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": 
"0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 5568, - 'Number of rows': 8192, - 'Area extent': (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_rss_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 5568, + "Number of rows": 8192, + "Area extent": (5567747.920155525, 2625352.665781975, -1000.1343488693237, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_RAPIDSCAN_FILL = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_rss_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 7548, - 'Number of rows': 5487, - 'Area extent': (5336716.885566711, 5155692.568421364, -2212297.179698944, -332044.6038246155) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 7548, + "Number of rows": 5487, + "Area extent": (5336716.885566711, 5155692.568421364, -2212297.179698944, 
-332044.6038246155) } } TEST_AREA_EXTENT_EARTHMODEL1_HRV_ROI_FILL = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) + "earth_model": 1, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5567747.920155525, 5569748.188853264, -5569748.188853264, -5567747.920155525) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_FULLDISK = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) + "earth_model": 2, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": True, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 5568, - 'Number of rows': 11136, - 'Area extent 0': (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221), - 'Area extent 1': (3600983.723104, 5571248.390376568, -1967764.3314003944, 2626852.867305279) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": True, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 5568, + "Number of rows": 11136, + "Area extent 0": (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221), + "Area extent 1": (3600983.723104, 5571248.390376568, -1967764.3314003944, 
2626852.867305279) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK_FILL = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": True, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 1392, - 'Area extent': (5567248.074173927, 5570248.477339745, -5570248.477339745, 1393687.2705221176) + "earth_model": 2, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_rss_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 1392, + "Area extent": (5567248.074173927, 5570248.477339745, -5570248.477339745, 1393687.2705221176) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_RAPIDSCAN_FILL = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '9.5', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) + "earth_model": 2, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_rss_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "9.5", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 
'is_rapid_scan': 1, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 5568, - 'Number of rows': 8192, - 'Area extent': (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_rss_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 5568, + "Number of rows": 8192, + "Area extent": (5566247.718632221, 2626852.867305279, -2500.3358721733093, -5566247.718632221) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_RAPIDSCAN_FILL = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_rss_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 1, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_rss_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 2516, - 'Number of rows': 1829, - 'Area extent': (5336217.0304, 5156192.8405, -2212797.3348, -331544.5498) + "earth_model": 2, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 2516, + "Number of rows": 1829, + "Area extent": (5336217.0304, 5156192.8405, -2212797.3348, -331544.5498) } } TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI_FILL = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='VIS006', resolution=3000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_3km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 
'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 3712, - 'Number of rows': 3712, - 'Area extent': (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) + "earth_model": 2, + "dataset_id": make_dataid(name="VIS006", resolution=3000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_3km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 3712, + "Number of rows": 3712, + "Area extent": (5567248.0742, 5570248.4773, -5570248.4773, -5567248.0742) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': False, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 7548, - 'Number of rows': 5487, - 'Area extent': (5335216.684043407, 5157192.769944668, -2213797.381222248, -330544.4023013115) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": False, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 7548, + "Number of rows": 5487, + "Area extent": (5335216.684043407, 5157192.769944668, -2213797.381222248, -330544.4023013115) } } TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL = { - 'earth_model': 2, - 'dataset_id': make_dataid(name='HRV', resolution=1000), - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'fill_disk': True, - 'expected_area_def': { - 'Area ID': 'msg_seviri_fes_1km', - 'Projection': {'a': '6378169000', 'b': '6356583800', 'h': '35785831', - 'lon_0': '0', 'no_defs': 'None', 'proj': 'geos', - 'type': 'crs', 'units': 'm', 'x_0': '0', 'y_0': '0'}, - 'Number of columns': 11136, - 'Number of rows': 11136, - 'Area extent': (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) + "earth_model": 2, + "dataset_id": make_dataid(name="HRV", resolution=1000), + "is_full_disk": False, + "is_rapid_scan": 0, + "fill_disk": True, + "expected_area_def": { + "Area ID": "msg_seviri_fes_1km", + "Projection": {"a": "6378169000", "b": "6356583800", "h": "35785831", + "lon_0": "0", "no_defs": "None", "proj": "geos", + "type": "crs", "units": "m", "x_0": "0", "y_0": "0"}, + "Number of columns": 11136, + "Number of rows": 11136, + "Area extent": (5566247.718632221, 5571248.390376568, -5571248.390376568, -5566247.718632221) } } TEST_IS_ROI_FULLDISK = { - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'is_roi': False + "is_full_disk": True, + "is_rapid_scan": 0, + "is_roi": False } TEST_IS_ROI_RAPIDSCAN = { - 'is_full_disk': False, - 'is_rapid_scan': 1, - 'is_roi': False + "is_full_disk": False, + "is_rapid_scan": 1, + "is_roi": False } TEST_IS_ROI_ROI = { - 'is_full_disk': False, - 'is_rapid_scan': 0, - 'is_roi': True + "is_full_disk": False, + "is_rapid_scan": 0, + "is_roi": True } TEST_CALIBRATION_MODE = { - 'earth_model': 1, - 'dataset_id': make_dataid(name='IR_108', 
calibration='radiance'), - 'is_full_disk': True, - 'is_rapid_scan': 0, - 'calibration': 'radiance', - 'CalSlope': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], - 'CalOffset': [-1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0], - 'GSICSCalCoeff': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], - 'GSICSOffsetCount': [-51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0] + "earth_model": 1, + "dataset_id": make_dataid(name="IR_108", calibration="radiance"), + "is_full_disk": True, + "is_rapid_scan": 0, + "calibration": "radiance", + "CalSlope": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], + "CalOffset": [-1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0, -1.0], + "GSICSCalCoeff": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95, 0.96, 0.97], + "GSICSOffsetCount": [-51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0, -51.0] } TEST_PADDER_RSS_ROI = { - 'img_bounds': {'south': [2], 'north': [4], 'east': [2], 'west': [3]}, - 'is_full_disk': False, - 'dataset_id': make_dataid(name='VIS006'), - 'dataset': xr.DataArray(np.ones((3, 2)), dims=['y', 'x']).astype(np.float32), - 'final_shape': (5, 5), - 'expected_padded_data': xr.DataArray(np.array([[np.nan, np.nan, np.nan, np.nan, np.nan], + "img_bounds": {"south": [2], "north": [4], "east": [2], "west": [3]}, + "is_full_disk": False, + "dataset_id": make_dataid(name="VIS006"), + "dataset": xr.DataArray(np.ones((3, 2)), dims=["y", "x"]).astype(np.float32), + "final_shape": (5, 5), + "expected_padded_data": xr.DataArray(np.array([[np.nan, np.nan, np.nan, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, np.nan, np.nan, np.nan, np.nan]]), - dims=['y', 'x']).astype(np.float32) + dims=["y", "x"]).astype(np.float32) } TEST_PADDER_FES_HRV = { - 'img_bounds': {'south': [1, 4], 'north': [3, 5], 'east': [2, 3], 'west': [3, 4]}, - 'is_full_disk': True, - 'dataset_id': make_dataid(name='HRV'), - 'dataset': xr.DataArray(np.ones((5, 2)), dims=['y', 'x']).astype(np.float32), - 'final_shape': (5, 5), - 'expected_padded_data': xr.DataArray(np.array([[np.nan, 1.0, 1.0, np.nan, np.nan], + "img_bounds": {"south": [1, 4], "north": [3, 5], "east": [2, 3], "west": [3, 4]}, + "is_full_disk": True, + "dataset_id": make_dataid(name="HRV"), + "dataset": xr.DataArray(np.ones((5, 2)), dims=["y", "x"]).astype(np.float32), + "final_shape": (5, 5), + "expected_padded_data": xr.DataArray(np.array([[np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, 1.0, 1.0, np.nan, np.nan], [np.nan, np.nan, 1.0, 1.0, np.nan], [np.nan, np.nan, 1.0, 1.0, np.nan]]), - dims=['y', 'x']).astype(np.float32) + dims=["y", "x"]).astype(np.float32) } -def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, good_qual='OK'): +def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, good_qual="OK"): """Create test header for SEVIRI L1.5 product. 
Header includes mandatory attributes for NativeMSGFileHandler.get_area_extent """ - if dataset_id['name'] == 'HRV': - reference_grid = 'ReferenceGridHRV' + if dataset_id["name"] == "HRV": + reference_grid = "ReferenceGridHRV" column_dir_grid_step = 1.0001343488693237 line_dir_grid_step = 1.0001343488693237 else: - reference_grid = 'ReferenceGridVIS_IR' + reference_grid = "ReferenceGridVIS_IR" column_dir_grid_step = 3.0004031658172607 line_dir_grid_step = 3.0004031658172607 @@ -547,45 +547,45 @@ def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, goo n_hrv_lines = n_visir_lines * 3 ssp_lon = 0 header = { - '15_MAIN_PRODUCT_HEADER': { - 'QQOV': {'Name': 'QQOV', - 'Value': good_qual} + "15_MAIN_PRODUCT_HEADER": { + "QQOV": {"Name": "QQOV", + "Value": good_qual} }, - '15_DATA_HEADER': { - 'ImageDescription': { + "15_DATA_HEADER": { + "ImageDescription": { reference_grid: { - 'ColumnDirGridStep': column_dir_grid_step, - 'LineDirGridStep': line_dir_grid_step, - 'GridOrigin': 2, # south-east corner + "ColumnDirGridStep": column_dir_grid_step, + "LineDirGridStep": line_dir_grid_step, + "GridOrigin": 2, # south-east corner }, - 'ProjectionDescription': { - 'LongitudeOfSSP': ssp_lon + "ProjectionDescription": { + "LongitudeOfSSP": ssp_lon } }, - 'GeometricProcessing': { - 'EarthModel': { - 'TypeOfEarthModel': earth_model, - 'EquatorialRadius': 6378169.0, - 'NorthPolarRadius': 6356583.800000001, - 'SouthPolarRadius': 6356583.800000001, + "GeometricProcessing": { + "EarthModel": { + "TypeOfEarthModel": earth_model, + "EquatorialRadius": 6378169.0, + "NorthPolarRadius": 6356583.800000001, + "SouthPolarRadius": 6356583.800000001, } }, - 'SatelliteStatus': { - 'SatelliteDefinition': { - 'SatelliteId': 324 + "SatelliteStatus": { + "SatelliteDefinition": { + "SatelliteId": 324 } } }, - '15_SECONDARY_PRODUCT_HEADER': { - 'NorthLineSelectedRectangle': {'Value': north}, - 'EastColumnSelectedRectangle': {'Value': east}, - 'WestColumnSelectedRectangle': {'Value': west}, - 'SouthLineSelectedRectangle': {'Value': south}, - 'SelectedBandIDs': {'Value': 'xxxxxxxxxxxx'}, - 'NumberColumnsVISIR': {'Value': n_visir_cols}, - 'NumberLinesVISIR': {'Value': n_visir_lines}, - 'NumberColumnsHRV': {'Value': n_hrv_cols}, - 'NumberLinesHRV': {'Value': n_hrv_lines}, + "15_SECONDARY_PRODUCT_HEADER": { + "NorthLineSelectedRectangle": {"Value": north}, + "EastColumnSelectedRectangle": {"Value": east}, + "WestColumnSelectedRectangle": {"Value": west}, + "SouthLineSelectedRectangle": {"Value": south}, + "SelectedBandIDs": {"Value": "xxxxxxxxxxxx"}, + "NumberColumnsVISIR": {"Value": n_visir_cols}, + "NumberLinesVISIR": {"Value": n_visir_lines}, + "NumberColumnsHRV": {"Value": n_hrv_cols}, + "NumberLinesHRV": {"Value": n_hrv_lines}, } } @@ -599,20 +599,20 @@ def create_test_trailer(is_rapid_scan): Trailer includes mandatory attributes for NativeMSGFileHandler.get_area_extent """ trailer = { - '15TRAILER': { - 'ImageProductionStats': { - 'ActualL15CoverageHRV': { - 'UpperNorthLineActual': 11136, - 'UpperWestColumnActual': 7533, - 'UpperSouthLineActual': 8193, - 'UpperEastColumnActual': 1966, - 'LowerNorthLineActual': 8192, - 'LowerWestColumnActual': 5568, - 'LowerSouthLineActual': 1, - 'LowerEastColumnActual': 1 + "15TRAILER": { + "ImageProductionStats": { + "ActualL15CoverageHRV": { + "UpperNorthLineActual": 11136, + "UpperWestColumnActual": 7533, + "UpperSouthLineActual": 8193, + "UpperEastColumnActual": 1966, + "LowerNorthLineActual": 8192, + "LowerWestColumnActual": 5568, + "LowerSouthLineActual": 1, + 
"LowerEastColumnActual": 1 }, - 'ActualScanningSummary': { - 'ReducedScan': is_rapid_scan + "ActualScanningSummary": { + "ReducedScan": is_rapid_scan } } } @@ -623,21 +623,21 @@ def create_test_trailer(is_rapid_scan): def prepare_area_definitions(test_dict): """Prepare calculated and expected area definitions for equal checking.""" - earth_model = test_dict['earth_model'] - dataset_id = test_dict['dataset_id'] - is_full_disk = test_dict['is_full_disk'] - is_rapid_scan = test_dict['is_rapid_scan'] - fill_disk = test_dict['fill_disk'] + earth_model = test_dict["earth_model"] + dataset_id = test_dict["dataset_id"] + is_full_disk = test_dict["is_full_disk"] + is_rapid_scan = test_dict["is_rapid_scan"] + fill_disk = test_dict["fill_disk"] header = create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) trailer = create_test_trailer(is_rapid_scan) - expected_area_def = test_dict['expected_area_def'] + expected_area_def = test_dict["expected_area_def"] - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ + with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch( - 'satpy.readers.seviri_l1b_native.has_archive_header' + "satpy.readers.seviri_l1b_native.has_archive_header" ) as has_archive_header: has_archive_header.return_value = True fromfile.return_value = header @@ -654,8 +654,8 @@ def prepare_area_definitions(test_dict): @pytest.mark.parametrize( - "actual, expected", - ( + ("actual", "expected"), + [ (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_VISIR_RAPIDSCAN)), @@ -676,52 +676,52 @@ def prepare_area_definitions(test_dict): (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_VISIR_ROI_FILL)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_ROI_FILL)), - ) + ] ) def test_area_definitions(actual, expected): """Test area definitions with only one area.""" np.testing.assert_allclose(np.array(actual.area_extent), - np.array(expected['Area extent'])) - assert actual.width == expected['Number of columns'] - assert actual.height == expected['Number of rows'] - assert actual.area_id == expected['Area ID'] + np.array(expected["Area extent"])) + assert actual.width == expected["Number of columns"] + assert actual.height == expected["Number of rows"] + assert actual.area_id == expected["Area ID"] @pytest.mark.parametrize( - "actual, expected", - ( + ("actual", "expected"), + [ (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL1_HRV_FULLDISK)), (prepare_area_definitions(TEST_AREA_EXTENT_EARTHMODEL2_HRV_FULLDISK)), - ) + ] ) def test_stacked_area_definitions(actual, expected): """Test area definitions with stacked areas.""" np.testing.assert_allclose(np.array(actual.defs[0].area_extent), - np.array(expected['Area extent 0'])) + np.array(expected["Area extent 0"])) 
np.testing.assert_allclose(np.array(actual.defs[1].area_extent), - np.array(expected['Area extent 1'])) - assert actual.width == expected['Number of columns'] - assert actual.height == expected['Number of rows'] - assert actual.defs[0].area_id, expected['Area ID'] - assert actual.defs[1].area_id, expected['Area ID'] + np.array(expected["Area extent 1"])) + assert actual.width == expected["Number of columns"] + assert actual.height == expected["Number of rows"] + assert actual.defs[0].area_id == expected["Area ID"] + assert actual.defs[1].area_id == expected["Area ID"] def prepare_is_roi(test_dict): """Prepare calculated and expected check for region of interest data for equal checking.""" earth_model = 2 - dataset_id = make_dataid(name='VIS006') - is_full_disk = test_dict['is_full_disk'] - is_rapid_scan = test_dict['is_rapid_scan'] + dataset_id = make_dataid(name="VIS006") + is_full_disk = test_dict["is_full_disk"] + is_rapid_scan = test_dict["is_rapid_scan"] header = create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan) trailer = create_test_trailer(is_rapid_scan) - expected = test_dict['is_roi'] + expected = test_dict["is_roi"] - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ + with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch( - 'satpy.readers.seviri_l1b_native.has_archive_header' + "satpy.readers.seviri_l1b_native.has_archive_header" ) as has_archive_header: has_archive_header.return_value = True fromfile.return_value = header @@ -736,12 +736,12 @@ def prepare_is_roi(test_dict): @pytest.mark.parametrize( - "actual, expected", - ( + ("actual", "expected"), + [ (prepare_is_roi(TEST_IS_ROI_FULLDISK)), (prepare_is_roi(TEST_IS_ROI_RAPIDSCAN)), (prepare_is_roi(TEST_IS_ROI_ROI)), - ) + ] ) def test_is_roi(actual, expected): """Test if given area is of area-of-interest.""" @@ -754,41 +754,41 @@ class TestNativeMSGFileHandler(unittest.TestCase): def test_get_available_channels(self): """Test the derivation of the available channel list.""" available_chs = get_available_channels(TEST1_HEADER_CHNLIST) - trues = ('WV_062', 'WV_073', 'IR_108', 'VIS006', 'VIS008', 'IR_120') + trues = ("WV_062", "WV_073", "IR_108", "VIS006", "VIS008", "IR_120") for bandname in AVAILABLE_CHANNELS: if bandname in trues: - self.assertTrue(available_chs[bandname]) + assert available_chs[bandname] else: - self.assertFalse(available_chs[bandname]) + assert not available_chs[bandname] available_chs = get_available_channels(TEST2_HEADER_CHNLIST) - trues = ('VIS006', 'VIS008', 'IR_039', 'WV_062', 'WV_073', 'IR_087', 'HRV') + trues = ("VIS006", "VIS008", "IR_039", "WV_062", "WV_073", "IR_087", "HRV") for bandname in AVAILABLE_CHANNELS: if bandname in trues: - self.assertTrue(available_chs[bandname]) + assert available_chs[bandname] else: - self.assertFalse(available_chs[bandname]) + assert not available_chs[bandname] available_chs = get_available_channels(TEST3_HEADER_CHNLIST) for bandname in AVAILABLE_CHANNELS: - 
self.assertTrue(available_chs[bandname]) + assert available_chs[bandname] TEST_HEADER_CALIB = { - 'RadiometricProcessing': { - 'Level15ImageCalibration': { - 'CalSlope': TestFileHandlerCalibrationBase.gains_nominal, - 'CalOffset': TestFileHandlerCalibrationBase.offsets_nominal, + "RadiometricProcessing": { + "Level15ImageCalibration": { + "CalSlope": TestFileHandlerCalibrationBase.gains_nominal, + "CalOffset": TestFileHandlerCalibrationBase.offsets_nominal, }, - 'MPEFCalFeedback': { - 'GSICSCalCoeff': TestFileHandlerCalibrationBase.gains_gsics, - 'GSICSOffsetCount': TestFileHandlerCalibrationBase.offsets_gsics + "MPEFCalFeedback": { + "GSICSCalCoeff": TestFileHandlerCalibrationBase.gains_gsics, + "GSICSOffsetCount": TestFileHandlerCalibrationBase.offsets_gsics } }, - 'ImageDescription': { - 'Level15ImageProduction': { - 'PlannedChanProcessing': TestFileHandlerCalibrationBase.radiance_types + "ImageDescription": { + "Level15ImageProduction": { + "PlannedChanProcessing": TestFileHandlerCalibrationBase.radiance_types } }, } @@ -797,29 +797,29 @@ def test_get_available_channels(self): class TestNativeMSGCalibration(TestFileHandlerCalibrationBase): """Unit tests for calibration.""" - @pytest.fixture(name='file_handler') + @pytest.fixture(name="file_handler") def file_handler(self): """Create a mocked file handler.""" header = { - '15_DATA_HEADER': { - 'ImageAcquisition': { - 'PlannedAcquisitionTime': { - 'TrueRepeatCycleStart': self.scan_time + "15_DATA_HEADER": { + "ImageAcquisition": { + "PlannedAcquisitionTime": { + "TrueRepeatCycleStart": self.scan_time } } } } trailer = { - '15TRAILER': { - 'ImageProductionStats': { - 'ActualScanningSummary': { - 'ForwardScanStart': self.scan_time + "15TRAILER": { + "ImageProductionStats": { + "ActualScanningSummary": { + "ForwardScanStart": self.scan_time } } } } - header['15_DATA_HEADER'].update(TEST_HEADER_CALIB) - with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__', + header["15_DATA_HEADER"].update(TEST_HEADER_CALIB) + with mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__", return_value=None): fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.header = header @@ -828,34 +828,34 @@ def file_handler(self): return fh @pytest.mark.parametrize( - ('channel', 'calibration', 'calib_mode', 'use_ext_coefs'), - ( + ("channel", "calibration", "calib_mode", "use_ext_coefs"), + [ # VIS channel, internal coefficients - ('VIS006', 'counts', 'NOMINAL', False), - ('VIS006', 'radiance', 'NOMINAL', False), - ('VIS006', 'radiance', 'GSICS', False), - ('VIS006', 'reflectance', 'NOMINAL', False), + ("VIS006", "counts", "NOMINAL", False), + ("VIS006", "radiance", "NOMINAL", False), + ("VIS006", "radiance", "GSICS", False), + ("VIS006", "reflectance", "NOMINAL", False), # VIS channel, external coefficients (mode should have no effect) - ('VIS006', 'radiance', 'GSICS', True), - ('VIS006', 'reflectance', 'NOMINAL', True), + ("VIS006", "radiance", "GSICS", True), + ("VIS006", "reflectance", "NOMINAL", True), # IR channel, internal coefficients - ('IR_108', 'counts', 'NOMINAL', False), - ('IR_108', 'radiance', 'NOMINAL', False), - ('IR_108', 'radiance', 'GSICS', False), - ('IR_108', 'brightness_temperature', 'NOMINAL', False), - ('IR_108', 'brightness_temperature', 'GSICS', False), + ("IR_108", "counts", "NOMINAL", False), + ("IR_108", "radiance", "NOMINAL", False), + ("IR_108", "radiance", "GSICS", False), + ("IR_108", "brightness_temperature", "NOMINAL", False), + ("IR_108", 
"brightness_temperature", "GSICS", False), # IR channel, external coefficients (mode should have no effect) - ('IR_108', 'radiance', 'NOMINAL', True), - ('IR_108', 'brightness_temperature', 'GSICS', True), + ("IR_108", "radiance", "NOMINAL", True), + ("IR_108", "brightness_temperature", "GSICS", True), # HRV channel, internal coefficiens - ('HRV', 'counts', 'NOMINAL', False), - ('HRV', 'radiance', 'NOMINAL', False), - ('HRV', 'radiance', 'GSICS', False), - ('HRV', 'reflectance', 'NOMINAL', False), + ("HRV", "counts", "NOMINAL", False), + ("HRV", "radiance", "NOMINAL", False), + ("HRV", "radiance", "GSICS", False), + ("HRV", "reflectance", "NOMINAL", False), # HRV channel, external coefficients (mode should have no effect) - ('HRV', 'radiance', 'GSICS', True), - ('HRV', 'reflectance', 'NOMINAL', True), - ) + ("HRV", "radiance", "GSICS", True), + ("HRV", "reflectance", "NOMINAL", True), + ] ) def test_calibrate( self, file_handler, counts, channel, calibration, calib_mode, @@ -882,37 +882,37 @@ def test_calibrate( class TestNativeMSGDataset: """Tests for getting the dataset.""" - @pytest.fixture + @pytest.fixture() def file_handler(self): """Create a file handler for testing.""" trailer = { - '15TRAILER': { - 'ImageProductionStats': { - 'ActualScanningSummary': { - 'ForwardScanStart': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'ForwardScanEnd': datetime(2006, 1, 1, 12, 27, 9, 304888), - 'ReducedScan': 0 + "15TRAILER": { + "ImageProductionStats": { + "ActualScanningSummary": { + "ForwardScanStart": datetime(2006, 1, 1, 12, 15, 9, 304888), + "ForwardScanEnd": datetime(2006, 1, 1, 12, 27, 9, 304888), + "ReducedScan": 0 } } } } mda = { - 'channel_list': ['VIS006', 'IR_108'], - 'number_of_lines': 4, - 'number_of_columns': 4, - 'is_full_disk': True, - 'platform_name': 'MSG-3', - 'offset_corrected': True, - 'projection_parameters': { - 'ssp_longitude': 0.0, - 'h': 35785831.0, - 'a': 6378169.0, - 'b': 6356583.8 + "channel_list": ["VIS006", "IR_108"], + "number_of_lines": 4, + "number_of_columns": 4, + "is_full_disk": True, + "platform_name": "MSG-3", + "offset_corrected": True, + "projection_parameters": { + "ssp_longitude": 0.0, + "h": 35785831.0, + "a": 6378169.0, + "b": 6356583.8 } } header = self._fake_header() data = self._fake_data() - with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__', + with mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__", return_value=None): fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) fh.header = header @@ -921,7 +921,7 @@ def file_handler(self): fh.dask_array = da.from_array(data) fh.platform_id = 324 fh.fill_disk = False - fh.calib_mode = 'NOMINAL' + fh.calib_mode = "NOMINAL" fh.ext_calib_coefs = {} fh.include_raw_metadata = False fh.mda_max_array_size = 100 @@ -930,32 +930,32 @@ def file_handler(self): @staticmethod def _fake_header(): header = { - '15_DATA_HEADER': { - 'SatelliteStatus': { - 'SatelliteDefinition': { - 'NominalLongitude': 0.0 + "15_DATA_HEADER": { + "SatelliteStatus": { + "SatelliteDefinition": { + "NominalLongitude": 0.0 }, - 'Orbit': { - 'OrbitPolynomial': ORBIT_POLYNOMIALS + "Orbit": { + "OrbitPolynomial": ORBIT_POLYNOMIALS } }, - 'ImageAcquisition': { - 'PlannedAcquisitionTime': { - 'TrueRepeatCycleStart': datetime(2006, 1, 1, 12, 15, 0, 0), - 'PlannedRepeatCycleEnd': datetime(2006, 1, 1, 12, 30, 0, 0), + "ImageAcquisition": { + "PlannedAcquisitionTime": { + "TrueRepeatCycleStart": datetime(2006, 1, 1, 12, 15, 0, 0), + "PlannedRepeatCycleEnd": datetime(2006, 1, 
1, 12, 30, 0, 0), } } }, } - header['15_DATA_HEADER'].update(TEST_HEADER_CALIB) + header["15_DATA_HEADER"].update(TEST_HEADER_CALIB) return header @staticmethod def _fake_data(): num_visir_cols = 5 # will be divided by 1.25 -> 4 columns visir_rec = [ - ('line_data', np.uint8, (num_visir_cols,)), - ('acq_time', time_cds_short) + ("line_data", np.uint8, (num_visir_cols,)), + ("acq_time", time_cds_short) ] vis006_line1 = ( [1, 2, 3, 4, 5], # line_data @@ -973,26 +973,26 @@ def _fake_data(): [(vis006_line2,), (ir108_line2,)], [(vis006_line3,), (ir108_line3,)], [(vis006_line4,), (ir108_line4,)]], - dtype=[('visir', visir_rec)] + dtype=[("visir", visir_rec)] ) return data def test_get_dataset(self, file_handler): """Test getting the dataset.""" dataset_id = make_dataid( - name='VIS006', + name="VIS006", resolution=3000, - calibration='counts' + calibration="counts" ) dataset_info = { - 'units': '1', - 'wavelength': (1, 2, 3), - 'standard_name': 'counts' + "units": "1", + "wavelength": (1, 2, 3), + "standard_name": "counts" } xarr = file_handler.get_dataset(dataset_id, dataset_info) expected = self._exp_data_array() xr.testing.assert_equal(xarr, expected) - assert 'raw_metadata' not in xarr.attrs + assert "raw_metadata" not in xarr.attrs assert file_handler.start_time == datetime(2006, 1, 1, 12, 15, 0) assert file_handler.end_time == datetime(2006, 1, 1, 12, 30, 0) assert_attrs_equal(xarr.attrs, expected.attrs, tolerance=1e-4) @@ -1011,7 +1011,7 @@ def test_repeat_cycle_duration(self, file_handler): """Test repeat cycle handling for FD or ReduscedScan.""" assert 15 == file_handler._repeat_cycle_duration # Change the reducescan scenario to test the repeat cycle duration handling - file_handler.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] = 1 + file_handler.trailer["15TRAILER"]["ImageProductionStats"]["ActualScanningSummary"]["ReducedScan"] = 1 assert 5 == file_handler._repeat_cycle_duration @staticmethod @@ -1022,72 +1022,72 @@ def _exp_data_array(): [44., 192., 835., 527.], [64., 273., 132., 788.]], dtype=np.float32), - dims=['y', 'x'], + dims=["y", "x"], attrs={ - 'orbital_parameters': { - 'satellite_actual_longitude': -3.55117540817073, - 'satellite_actual_latitude': -0.5711243456528018, - 'satellite_actual_altitude': 35783296.150123544, - 'satellite_nominal_longitude': 0.0, - 'satellite_nominal_latitude': 0.0, - 'projection_longitude': 0.0, - 'projection_latitude': 0.0, - 'projection_altitude': 35785831.0 + "orbital_parameters": { + "satellite_actual_longitude": -3.55117540817073, + "satellite_actual_latitude": -0.5711243456528018, + "satellite_actual_altitude": 35783296.150123544, + "satellite_nominal_longitude": 0.0, + "satellite_nominal_latitude": 0.0, + "projection_longitude": 0.0, + "projection_latitude": 0.0, + "projection_altitude": 35785831.0 }, - 'time_parameters': { - 'nominal_start_time': datetime(2006, 1, 1, 12, 15, 0), - 'nominal_end_time': datetime(2006, 1, 1, 12, 30, 0), - 'observation_start_time': datetime(2006, 1, 1, 12, 15, 9, 304888), - 'observation_end_time': datetime(2006, 1, 1, 12, 27, 9, 304888), + "time_parameters": { + "nominal_start_time": datetime(2006, 1, 1, 12, 15, 0), + "nominal_end_time": datetime(2006, 1, 1, 12, 30, 0), + "observation_start_time": datetime(2006, 1, 1, 12, 15, 9, 304888), + "observation_end_time": datetime(2006, 1, 1, 12, 27, 9, 304888), }, - 'georef_offset_corrected': True, - 'platform_name': 'MSG-3', - 'sensor': 'seviri', - 'units': '1', - 'wavelength': (1, 2, 3), - 'standard_name': 'counts', + 
"georef_offset_corrected": True, + "platform_name": "MSG-3", + "sensor": "seviri", + "units": "1", + "wavelength": (1, 2, 3), + "standard_name": "counts", } ) - expected['acq_time'] = ('y', [np.datetime64('1958-01-02 00:00:01'), - np.datetime64('1958-01-02 00:00:02'), - np.datetime64('1958-01-02 00:00:03'), - np.datetime64('1958-01-02 00:00:04')]) + expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01"), + np.datetime64("1958-01-02 00:00:02"), + np.datetime64("1958-01-02 00:00:03"), + np.datetime64("1958-01-02 00:00:04")]) return expected def test_get_dataset_with_raw_metadata(self, file_handler): """Test provision of raw metadata.""" file_handler.include_raw_metadata = True dataset_id = make_dataid( - name='VIS006', + name="VIS006", resolution=3000, - calibration='counts' + calibration="counts" ) dataset_info = { - 'units': '1', - 'wavelength': (1, 2, 3), - 'standard_name': 'counts' + "units": "1", + "wavelength": (1, 2, 3), + "standard_name": "counts" } xarr = file_handler.get_dataset(dataset_id, dataset_info) - assert 'raw_metadata' in xarr.attrs + assert "raw_metadata" in xarr.attrs def test_satpos_no_valid_orbit_polynomial(self, file_handler): """Test satellite position if there is no valid orbit polynomial.""" - file_handler.header['15_DATA_HEADER']['SatelliteStatus'][ - 'Orbit']['OrbitPolynomial'] = ORBIT_POLYNOMIALS_INVALID + file_handler.header["15_DATA_HEADER"]["SatelliteStatus"][ + "Orbit"]["OrbitPolynomial"] = ORBIT_POLYNOMIALS_INVALID dataset_id = make_dataid( - name='VIS006', + name="VIS006", resolution=3000, - calibration='counts' + calibration="counts" ) dataset_info = { - 'units': '1', - 'wavelength': (1, 2, 3), - 'standard_name': 'counts' + "units": "1", + "wavelength": (1, 2, 3), + "standard_name": "counts" } with pytest.warns(UserWarning, match="No orbit polynomial"): xarr = file_handler.get_dataset(dataset_id, dataset_info) - assert 'satellite_actual_longitude' not in xarr.attrs[ - 'orbital_parameters'] + assert "satellite_actual_longitude" not in xarr.attrs[ + "orbital_parameters"] class TestNativeMSGPadder(unittest.TestCase): @@ -1096,12 +1096,12 @@ class TestNativeMSGPadder(unittest.TestCase): @staticmethod def prepare_padder(test_dict): """Initialize Padder and pad test data.""" - dataset_id = test_dict['dataset_id'] - img_bounds = test_dict['img_bounds'] - is_full_disk = test_dict['is_full_disk'] - dataset = test_dict['dataset'] - final_shape = test_dict['final_shape'] - expected_padded_data = test_dict['expected_padded_data'] + dataset_id = test_dict["dataset_id"] + img_bounds = test_dict["img_bounds"] + is_full_disk = test_dict["is_full_disk"] + dataset = test_dict["dataset"] + final_shape = test_dict["final_shape"] + expected_padded_data = test_dict["expected_padded_data"] padder = Padder(dataset_id, img_bounds, is_full_disk) padder._final_shape = final_shape @@ -1123,7 +1123,7 @@ def test_padder_fes_hrv(self): class TestNativeMSGFilenames: """Test identification of Native format filenames.""" - @pytest.fixture + @pytest.fixture() def reader(self): """Return reader for SEVIRI Native format.""" from satpy._config import config_search_paths @@ -1150,56 +1150,56 @@ def test_file_pattern(self, reader): @pytest.mark.parametrize( - 'file_content,exp_header_size', - ( + ("file_content", "exp_header_size"), + [ (ASCII_STARTSWITH, 450400), # with ascii header - (b'foobar', 445286), # without ascii header - ) + (b"foobar", 445286), # without ascii header + ] ) def test_header_type(file_content, exp_header_size): """Test identification of the file header 
type.""" header = create_test_header( - dataset_id=make_dataid(name='VIS006', resolution=3000), + dataset_id=make_dataid(name="VIS006", resolution=3000), earth_model=1, is_full_disk=True, is_rapid_scan=0 ) - if file_content == b'foobar': - header.pop('15_SECONDARY_PRODUCT_HEADER') - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ + if file_content == b"foobar": + header.pop("15_SECONDARY_PRODUCT_HEADER") + with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch("builtins.open", mock.mock_open(read_data=file_content)): fromfile.return_value = header recarray2dict.side_effect = (lambda x: x) _get_memmap.return_value = np.arange(3) fh = NativeMSGFileHandler(filename=None, filename_info={}, filetype_info=None) assert fh.header_type.itemsize == exp_header_size - assert '15_SECONDARY_PRODUCT_HEADER' in fh.header + assert "15_SECONDARY_PRODUCT_HEADER" in fh.header def test_header_warning(): """Test warning is raised for NOK quality flag.""" header_good = create_test_header( - dataset_id=make_dataid(name='VIS006', resolution=3000), + dataset_id=make_dataid(name="VIS006", resolution=3000), earth_model=1, is_full_disk=True, is_rapid_scan=0, - good_qual='OK' + good_qual="OK" ) header_bad = create_test_header( - dataset_id=make_dataid(name='VIS006', resolution=3000), + dataset_id=make_dataid(name="VIS006", resolution=3000), earth_model=1, is_full_disk=True, is_rapid_scan=0, - good_qual='NOK' + good_qual="NOK" ) - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \ - mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \ - mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \ + with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile, \ + mock.patch("satpy.readers.seviri_l1b_native.recarray2dict") as recarray2dict, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap") as _get_memmap, \ + mock.patch("satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer"), \ mock.patch("builtins.open", mock.mock_open(read_data=ASCII_STARTSWITH)): recarray2dict.side_effect = (lambda x: x) _get_memmap.return_value = np.arange(3) @@ -1217,30 +1217,30 @@ def test_header_warning(): # check that without Main Header the code doesn't crash header_missing = header_good.copy() - header_missing.pop('15_MAIN_PRODUCT_HEADER') + header_missing.pop("15_MAIN_PRODUCT_HEADER") fromfile.return_value = header_missing with warnings.catch_warnings(): warnings.simplefilter("error") - NativeMSGFileHandler('myfile', {}, None) + NativeMSGFileHandler("myfile", {}, None) @pytest.mark.parametrize( - "starts_with, expected", + ("starts_with", "expected"), [ (ASCII_STARTSWITH, True), - (b'this_shall_fail', False) + (b"this_shall_fail", False) ] ) def test_has_archive_header(starts_with, expected): """Test if the file includes an 
ASCII archive header.""" with mock.patch("builtins.open", mock.mock_open(read_data=starts_with)): - actual = has_archive_header('filename') + actual = has_archive_header("filename") assert actual == expected def test_read_header(): """Test that reading header returns the header correctly converted to a dictionary.""" - keys = ('SatelliteId', 'NominalLongitude', 'SatelliteStatus') + keys = ("SatelliteId", "NominalLongitude", "SatelliteStatus") values = (324, 0.0, 1) expected = dict(zip(keys, values)) @@ -1248,7 +1248,7 @@ def test_read_header(): dtypes = np.dtype([(k, t) for k, t in zip(keys, types)]) hdr_data = np.array([values], dtype=dtypes) - with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile: + with mock.patch("satpy.readers.seviri_l1b_native.np.fromfile") as fromfile: fromfile.return_value = hdr_data actual = recarray2dict(hdr_data) assert actual == expected diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py index f85e9f5aae..f6a54aa60e 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py @@ -29,17 +29,17 @@ from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase from satpy.tests.utils import assert_attrs_equal, make_dataid -channel_keys_dict = {'VIS006': 'ch1', 'IR_108': 'ch9'} +channel_keys_dict = {"VIS006": "ch1", "IR_108": "ch9"} def to_cds_time(time): """Convert datetime to (days, msecs) since 1958-01-01.""" if isinstance(time, datetime): time = np.datetime64(time) - t0 = np.datetime64('1958-01-01 00:00') + t0 = np.datetime64("1958-01-01 00:00") delta = time - t0 - days = (delta / np.timedelta64(1, 'D')).astype(int) - msecs = delta / np.timedelta64(1, 'ms') - days * 24 * 3600 * 1E3 + days = (delta / np.timedelta64(1, "D")).astype(int) + msecs = delta / np.timedelta64(1, "ms") - days * 24 * 3600 * 1E3 return days, msecs @@ -64,171 +64,171 @@ def _get_fake_dataset(self, counts, h5netcdf): orbit_poly_start_day, orbit_poly_start_msec = to_cds_time( np.array([datetime(2019, 12, 31, 18), datetime(2019, 12, 31, 22)], - dtype='datetime64') + dtype="datetime64") ) orbit_poly_end_day, orbit_poly_end_msec = to_cds_time( np.array([datetime(2019, 12, 31, 22), datetime(2020, 1, 1, 2)], - dtype='datetime64') + dtype="datetime64") ) counts = counts.rename({ - 'y': 'num_rows_vis_ir', - 'x': 'num_columns_vis_ir' + "y": "num_rows_vis_ir", + "x": "num_columns_vis_ir" }) scan_time_days, scan_time_msecs = to_cds_time(self.scan_time) ds = xr.Dataset( { - 'ch1': counts.copy(), - 'ch9': counts.copy(), - 'HRV': (('num_rows_hrv', 'num_columns_hrv'), [[1, 2, 3], + "ch1": counts.copy(), + "ch9": counts.copy(), + "HRV": (("num_rows_hrv", "num_columns_hrv"), [[1, 2, 3], [4, 5, 6], [7, 8, 9]]), - 'planned_chan_processing': self.radiance_types, - 'channel_data_visir_data_l10_line_mean_acquisition_time_day': ( - ('num_rows_vis_ir', 'channels_vis_ir_dim'), + "planned_chan_processing": self.radiance_types, + "channel_data_visir_data_l10_line_mean_acquisition_time_day": ( + ("num_rows_vis_ir", "channels_vis_ir_dim"), acq_time_day ), - 'channel_data_visir_data_l10_line_mean_acquisition_msec': ( - ('num_rows_vis_ir', 'channels_vis_ir_dim'), + "channel_data_visir_data_l10_line_mean_acquisition_msec": ( + ("num_rows_vis_ir", "channels_vis_ir_dim"), acq_time_msec ), - 'channel_data_visir_data_line_validity': ( - ('num_rows_vis_ir', 'channels_vis_ir_dim'), + "channel_data_visir_data_line_validity": ( + ("num_rows_vis_ir", 
"channels_vis_ir_dim"), line_validity ), - 'channel_data_visir_data_line_geometric_quality': ( - ('num_rows_vis_ir', 'channels_vis_ir_dim'), + "channel_data_visir_data_line_geometric_quality": ( + ("num_rows_vis_ir", "channels_vis_ir_dim"), line_geom_radio_quality ), - 'channel_data_visir_data_line_radiometric_quality': ( - ('num_rows_vis_ir', 'channels_vis_ir_dim'), + "channel_data_visir_data_line_radiometric_quality": ( + ("num_rows_vis_ir", "channels_vis_ir_dim"), line_geom_radio_quality ), - 'orbit_polynomial_x': ( - ('orbit_polynomial_dim_row', - 'orbit_polynomial_dim_col'), - ORBIT_POLYNOMIALS['X'][0:2] + "orbit_polynomial_x": ( + ("orbit_polynomial_dim_row", + "orbit_polynomial_dim_col"), + ORBIT_POLYNOMIALS["X"][0:2] ), - 'orbit_polynomial_y': ( - ('orbit_polynomial_dim_row', - 'orbit_polynomial_dim_col'), - ORBIT_POLYNOMIALS['Y'][0:2] + "orbit_polynomial_y": ( + ("orbit_polynomial_dim_row", + "orbit_polynomial_dim_col"), + ORBIT_POLYNOMIALS["Y"][0:2] ), - 'orbit_polynomial_z': ( - ('orbit_polynomial_dim_row', - 'orbit_polynomial_dim_col'), - ORBIT_POLYNOMIALS['Z'][0:2] + "orbit_polynomial_z": ( + ("orbit_polynomial_dim_row", + "orbit_polynomial_dim_col"), + ORBIT_POLYNOMIALS["Z"][0:2] ), - 'orbit_polynomial_start_time_day': ( - 'orbit_polynomial_dim_row', + "orbit_polynomial_start_time_day": ( + "orbit_polynomial_dim_row", orbit_poly_start_day ), - 'orbit_polynomial_start_time_msec': ( - 'orbit_polynomial_dim_row', + "orbit_polynomial_start_time_msec": ( + "orbit_polynomial_dim_row", orbit_poly_start_msec ), - 'orbit_polynomial_end_time_day': ( - 'orbit_polynomial_dim_row', + "orbit_polynomial_end_time_day": ( + "orbit_polynomial_dim_row", orbit_poly_end_day ), - 'orbit_polynomial_end_time_msec': ( - 'orbit_polynomial_dim_row', + "orbit_polynomial_end_time_msec": ( + "orbit_polynomial_dim_row", orbit_poly_end_msec ), }, attrs={ - 'equatorial_radius': 6378.169, - 'north_polar_radius': 6356.5838, - 'south_polar_radius': 6356.5838, - 'longitude_of_SSP': 0.0, - 'nominal_longitude': -3.5, - 'satellite_id': self.platform_id, - 'true_repeat_cycle_start_day': scan_time_days, - 'true_repeat_cycle_start_mi_sec': scan_time_msecs, - 'planned_repeat_cycle_end_day': scan_time_days, - 'planned_repeat_cycle_end_mi_sec': scan_time_msecs, - 'north_most_line': 3712, - 'east_most_pixel': 1, - 'west_most_pixel': 3712, - 'south_most_line': 1, - 'vis_ir_grid_origin': 0, - 'vis_ir_column_dir_grid_step': 3.0004032, - 'vis_ir_line_dir_grid_step': 3.0004032, - 'type_of_earth_model': '0x02', - 'nominal_image_scanning': 'T', + "equatorial_radius": 6378.169, + "north_polar_radius": 6356.5838, + "south_polar_radius": 6356.5838, + "longitude_of_SSP": 0.0, + "nominal_longitude": -3.5, + "satellite_id": self.platform_id, + "true_repeat_cycle_start_day": scan_time_days, + "true_repeat_cycle_start_mi_sec": scan_time_msecs, + "planned_repeat_cycle_end_day": scan_time_days, + "planned_repeat_cycle_end_mi_sec": scan_time_msecs, + "north_most_line": 3712, + "east_most_pixel": 1, + "west_most_pixel": 3712, + "south_most_line": 1, + "vis_ir_grid_origin": 0, + "vis_ir_column_dir_grid_step": 3.0004032, + "vis_ir_line_dir_grid_step": 3.0004032, + "type_of_earth_model": "0x02", + "nominal_image_scanning": "T", } ) if h5netcdf: - nattrs = {'equatorial_radius': np.array([6378.169]), - 'north_polar_radius': np.array([6356.5838]), - 'south_polar_radius': np.array([6356.5838]), - 'longitude_of_SSP': np.array([0.0]), - 'vis_ir_column_dir_grid_step': np.array([3.0004032]), - 'vis_ir_line_dir_grid_step': np.array([3.0004032]) + 
nattrs = {"equatorial_radius": np.array([6378.169]), + "north_polar_radius": np.array([6356.5838]), + "south_polar_radius": np.array([6356.5838]), + "longitude_of_SSP": np.array([0.0]), + "vis_ir_column_dir_grid_step": np.array([3.0004032]), + "vis_ir_line_dir_grid_step": np.array([3.0004032]) } ds.attrs.update(nattrs) - ds['ch1'].attrs.update({ - 'scale_factor': self.gains_nominal[0], - 'add_offset': self.offsets_nominal[0] + ds["ch1"].attrs.update({ + "scale_factor": self.gains_nominal[0], + "add_offset": self.offsets_nominal[0] }) # IR_108 is dataset with key ch9 - ds['ch9'].attrs.update({ - 'scale_factor': self.gains_nominal[8], - 'add_offset': self.offsets_nominal[8], + ds["ch9"].attrs.update({ + "scale_factor": self.gains_nominal[8], + "add_offset": self.offsets_nominal[8], }) # Add some attributes so that the reader can strip them strip_attrs = { - 'comment': None, - 'long_name': None, - 'valid_min': None, - 'valid_max': None + "comment": None, + "long_name": None, + "valid_min": None, + "valid_max": None } - for name in ['ch1', 'ch9']: + for name in ["ch1", "ch9"]: ds[name].attrs.update(strip_attrs) return ds - @pytest.fixture + @pytest.fixture() def h5netcdf(self): """Fixture for xr backend choice.""" return False - @pytest.fixture(name='file_handler') + @pytest.fixture(name="file_handler") def file_handler(self, counts, h5netcdf): """Create a mocked file handler.""" with mock.patch( - 'satpy.readers.seviri_l1b_nc.open_dataset', + "satpy.readers.seviri_l1b_nc.open_dataset", return_value=self._get_fake_dataset(counts=counts, h5netcdf=h5netcdf) ): return NCSEVIRIFileHandler( - 'filename', - {'platform_shortname': 'MSG3', - 'start_time': self.scan_time, - 'service': 'MSG'}, - {'filetype': 'info'} + "filename", + {"platform_shortname": "MSG3", + "start_time": self.scan_time, + "service": "MSG"}, + {"filetype": "info"} ) @pytest.mark.parametrize( - ('channel', 'calibration', 'use_ext_coefs'), + ("channel", "calibration", "use_ext_coefs"), [ # VIS channel, internal coefficients - ('VIS006', 'counts', False), - ('VIS006', 'radiance', False), - ('VIS006', 'reflectance', False), + ("VIS006", "counts", False), + ("VIS006", "radiance", False), + ("VIS006", "reflectance", False), # VIS channel, external coefficients - ('VIS006', 'radiance', True), - ('VIS006', 'reflectance', True), + ("VIS006", "radiance", True), + ("VIS006", "reflectance", True), # IR channel, internal coefficients - ('IR_108', 'counts', False), - ('IR_108', 'radiance', False), - ('IR_108', 'brightness_temperature', False), + ("IR_108", "counts", False), + ("IR_108", "radiance", False), + ("IR_108", "brightness_temperature", False), # IR channel, external coefficients - ('IR_108', 'radiance', True), - ('IR_108', 'brightness_temperature', True), + ("IR_108", "radiance", True), + ("IR_108", "brightness_temperature", True), # FUTURE: Enable once HRV reading has been fixed. 
# # HRV channel, internal coefficiens # ('HRV', 'counts', False), @@ -247,7 +247,7 @@ def test_calibrate( expected = self._get_expected( channel=channel, calibration=calibration, - calib_mode='NOMINAL', + calib_mode="NOMINAL", use_ext_coefs=use_ext_coefs ) fh = file_handler @@ -261,35 +261,35 @@ def test_calibrate( def test_mask_bad_quality(self, file_handler): """Test masking of bad quality scan lines.""" - channel = 'VIS006' + channel = "VIS006" key = channel_keys_dict[channel] dataset_info = { - 'nc_key': key, - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name' + "nc_key": key, + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name" } expected = self._get_expected( channel=channel, - calibration='radiance', - calib_mode='NOMINAL', + calibration="radiance", + calib_mode="NOMINAL", use_ext_coefs=False ) fh = file_handler res = fh._mask_bad_quality(fh.nc[key], dataset_info) - new_data = np.zeros_like(expected.data).astype('float32') + new_data = np.zeros_like(expected.data).astype("float32") new_data[:, :] = np.nan expected = expected.copy(data=new_data) xr.testing.assert_allclose(res, expected) @pytest.mark.parametrize( - ('channel', 'calibration', 'mask_bad_quality_scan_lines'), + ("channel", "calibration", "mask_bad_quality_scan_lines"), [ - ('VIS006', 'reflectance', True), - ('VIS006', 'reflectance', False), - ('IR_108', 'brightness_temperature', True) + ("VIS006", "reflectance", True), + ("VIS006", "reflectance", False), + ("IR_108", "brightness_temperature", True) ] ) def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_scan_lines): @@ -297,10 +297,10 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_ dataset_id = make_dataid(name=channel, calibration=calibration) key = channel_keys_dict[channel] dataset_info = { - 'nc_key': key, - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name' + "nc_key": key, + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name" } file_handler.mask_bad_quality_scan_lines = mask_bad_quality_scan_lines @@ -310,43 +310,43 @@ def test_get_dataset(self, file_handler, channel, calibration, mask_bad_quality_ expected = self._get_expected( channel=channel, calibration=calibration, - calib_mode='NOMINAL', + calib_mode="NOMINAL", use_ext_coefs=False ) expected.attrs = { - 'orbital_parameters': { - 'satellite_actual_longitude': -3.541742131915741, - 'satellite_actual_latitude': -0.5203765167594427, - 'satellite_actual_altitude': 35783419.16135868, - 'satellite_nominal_longitude': -3.5, - 'satellite_nominal_latitude': 0.0, - 'projection_longitude': 0.0, - 'projection_latitude': 0.0, - 'projection_altitude': 35785831.0 + "orbital_parameters": { + "satellite_actual_longitude": -3.541742131915741, + "satellite_actual_latitude": -0.5203765167594427, + "satellite_actual_altitude": 35783419.16135868, + "satellite_nominal_longitude": -3.5, + "satellite_nominal_latitude": 0.0, + "projection_longitude": 0.0, + "projection_latitude": 0.0, + "projection_altitude": 35785831.0 }, - 'time_parameters': { - 'nominal_start_time': datetime(2020, 1, 1, 0, 0), - 'nominal_end_time': datetime(2020, 1, 1, 0, 0), - 'observation_start_time': datetime(2020, 1, 1, 0, 0), - 'observation_end_time': datetime(2020, 1, 1, 0, 0), + "time_parameters": { + "nominal_start_time": datetime(2020, 1, 1, 0, 0), + "nominal_end_time": datetime(2020, 1, 1, 0, 0), + "observation_start_time": datetime(2020, 1, 1, 0, 0), + 
"observation_end_time": datetime(2020, 1, 1, 0, 0), }, - 'georef_offset_corrected': True, - 'platform_name': 'Meteosat-11', - 'sensor': 'seviri', - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name' + "georef_offset_corrected": True, + "platform_name": "Meteosat-11", + "sensor": "seviri", + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name" } - expected['acq_time'] = ('y', [np.datetime64('1958-01-02 00:00:01'), - np.datetime64('1958-01-02 00:00:02')]) + expected["acq_time"] = ("y", [np.datetime64("1958-01-02 00:00:01"), + np.datetime64("1958-01-02 00:00:02")]) expected = expected[::-1] # reader flips data upside down if mask_bad_quality_scan_lines: expected = file_handler._mask_bad_quality(expected, dataset_info) xr.testing.assert_allclose(res, expected) - for key in ['sun_earth_distance_correction_applied', - 'sun_earth_distance_correction_factor']: + for key in ["sun_earth_distance_correction_applied", + "sun_earth_distance_correction_factor"]: res.attrs.pop(key, None) assert_attrs_equal(res.attrs, expected.attrs, tolerance=1e-4) @@ -364,29 +364,29 @@ def test_repeat_cycle_duration(self, file_handler): """Test repeat cycle handling for FD or ReduscedScan.""" assert 15 == file_handler._repeat_cycle_duration # Change the reducescan scenario to test the repeat cycle duration handling - file_handler.nc.attrs['nominal_image_scanning'] = '' - file_handler.nc.attrs['reduced_scanning'] = 'T' + file_handler.nc.attrs["nominal_image_scanning"] = "" + file_handler.nc.attrs["reduced_scanning"] = "T" # file_handler.trailer['15TRAILER']['ImageProductionStats']['ActualScanningSummary']['ReducedScan'] = 1 assert 5 == file_handler._repeat_cycle_duration def test_satpos_no_valid_orbit_polynomial(self, file_handler): """Test satellite position if there is no valid orbit polynomial.""" - dataset_id = make_dataid(name='VIS006', calibration='counts') + dataset_id = make_dataid(name="VIS006", calibration="counts") dataset_info = { - 'name': 'VIS006', - 'nc_key': 'ch1', - 'units': 'units', - 'wavelength': 'wavelength', - 'standard_name': 'standard_name' + "name": "VIS006", + "nc_key": "ch1", + "units": "units", + "wavelength": "wavelength", + "standard_name": "standard_name" } - file_handler.nc['orbit_polynomial_start_time_day'] = 0 - file_handler.nc['orbit_polynomial_end_time_day'] = 0 + file_handler.nc["orbit_polynomial_start_time_day"] = 0 + file_handler.nc["orbit_polynomial_end_time_day"] = 0 res = file_handler.get_dataset(dataset_id, dataset_info) - assert 'satellite_actual_longitude' not in res.attrs[ - 'orbital_parameters'] + assert "satellite_actual_longitude" not in res.attrs[ + "orbital_parameters"] - @pytest.mark.parametrize('h5netcdf', [True]) + @pytest.mark.parametrize("h5netcdf", [True]) def test_h5netcdf_pecularity(self, file_handler, h5netcdf): """Test conversion of attributes when xarray is used with h5netcdf backend.""" fh = file_handler - assert isinstance(fh.mda['projection_parameters']['a'], float) + assert isinstance(fh.mda["projection_parameters"]["a"], float) diff --git a/satpy/tests/reader_tests/test_seviri_l2_bufr.py b/satpy/tests/reader_tests/test_seviri_l2_bufr.py index 3578645e5b..ec3fdf7b56 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_bufr.py +++ b/satpy/tests/reader_tests/test_seviri_l2_bufr.py @@ -29,83 +29,83 @@ from satpy.tests.utils import make_dataid -FILETYPE_INFO = {'file_type': 'seviri_l2_bufr_asr'} +FILETYPE_INFO = {"file_type": "seviri_l2_bufr_asr"} -FILENAME_INFO = {'start_time': '20191112000000', - 
'spacecraft': 'MSG2'} -FILENAME_INFO2 = {'start_time': '20191112000000', - 'spacecraft': 'MSG2', - 'server': 'TESTSERVER'} +FILENAME_INFO = {"start_time": "20191112000000", + "spacecraft": "MSG2"} +FILENAME_INFO2 = {"start_time": "20191112000000", + "spacecraft": "MSG2", + "server": "TESTSERVER"} MPEF_PRODUCT_HEADER = { - 'NominalTime': datetime(2019, 11, 6, 18, 0), - 'SpacecraftName': '09', - 'RectificationLongitude': 'E0455' + "NominalTime": datetime(2019, 11, 6, 18, 0), + "SpacecraftName": "09", + "RectificationLongitude": "E0455" } DATASET_INFO = { - 'name': 'testdata', - 'key': '#1#brightnessTemperature', - 'coordinates': ('longitude', 'latitude'), - 'fill_value': 0 + "name": "testdata", + "key": "#1#brightnessTemperature", + "coordinates": ("longitude", "latitude"), + "fill_value": 0 } DATASET_INFO_LAT = { - 'name': 'latitude', - 'key': '#1#latitude', - 'fill_value': -1.e+100 + "name": "latitude", + "key": "#1#latitude", + "fill_value": -1.e+100 } DATASET_INFO_LON = { - 'name': 'longitude', - 'key': '#1#longitude', - 'fill_value': -1.e+100 + "name": "longitude", + "key": "#1#longitude", + "fill_value": -1.e+100 } DATASET_ATTRS = { - 'platform_name': 'MET09', - 'ssp_lon': 45.5, - 'seg_size': 16 + "platform_name": "MET09", + "ssp_lon": 45.5, + "seg_size": 16 } AREA_DEF = geometry.AreaDefinition( - 'msg_seviri_iodc_48km', - 'MSG SEVIRI Indian Ocean Data Coverage service area definition with 48 km resolution', + "msg_seviri_iodc_48km", + "MSG SEVIRI Indian Ocean Data Coverage service area definition with 48 km resolution", "", - {'a': 6378169., 'b': 6356583.8, 'lon_0': DATASET_ATTRS['ssp_lon'], - 'h': 35785831., 'proj': 'geos', 'units': 'm'}, + {"a": 6378169., "b": 6356583.8, "lon_0": DATASET_ATTRS["ssp_lon"], + "h": 35785831., "proj": "geos", "units": "m"}, 232, 232, (-5570248.6867, -5567248.2834, 5567248.2834, 5570248.6867) ) AREA_DEF_FES = geometry.AreaDefinition( - 'msg_seviri_res_48km', - 'MSG SEVIRI Full Earth Scanning service area definition with 48 km resolution', + "msg_seviri_res_48km", + "MSG SEVIRI Full Earth Scanning service area definition with 48 km resolution", "", - {'a': 6378169., 'b': 6356583.8, 'lon_0': 0.0, - 'h': 35785831., 'proj': 'geos', 'units': 'm'}, + {"a": 6378169., "b": 6356583.8, "lon_0": 0.0, + "h": 35785831., "proj": "geos", "units": "m"}, 232, 232, (-5570248.6867, -5567248.2834, 5567248.2834, 5570248.6867) ) AREA_DEF_EXT = geometry.AreaDefinition( - 'msg_seviri_iodc_9km_ext', - 'MSG SEVIRI Indian Ocean Data Coverage service area definition with 9 km resolution ' - '(extended outside original 3km grid)', + "msg_seviri_iodc_9km_ext", + "MSG SEVIRI Indian Ocean Data Coverage service area definition with 9 km resolution " + "(extended outside original 3km grid)", "", - {'a': 6378169., 'b': 6356583.8, 'lon_0': DATASET_ATTRS['ssp_lon'], - 'h': 35785831., 'proj': 'geos', 'units': 'm'}, + {"a": 6378169., "b": 6356583.8, "lon_0": DATASET_ATTRS["ssp_lon"], + "h": 35785831., "proj": "geos", "units": "m"}, 1238, 1238, (-5571748.8883, -5571748.8882, 5571748.8882, 5571748.8883) ) TEST_FILES = [ - 'ASRBUFRProd_20191106130000Z_00_OMPEFS02_MET09_FES_E0000', - 'MSG2-SEVI-MSGASRE-0101-0101-20191106130000.000000000Z-20191106131702-1362128.bfr', - 'MSG2-SEVI-MSGASRE-0101-0101-20191106101500.000000000Z-20191106103218-1362148' + "ASRBUFRProd_20191106130000Z_00_OMPEFS02_MET09_FES_E0000", + "MSG2-SEVI-MSGASRE-0101-0101-20191106130000.000000000Z-20191106131702-1362128.bfr", + "MSG2-SEVI-MSGASRE-0101-0101-20191106101500.000000000Z-20191106103218-1362148" ] # Test data @@ -117,30 
+117,30 @@ class SeviriL2BufrData: """Mock SEVIRI L2 BUFR data.""" - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") - def __init__(self, filename, with_adef=False, rect_lon='default'): + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") + def __init__(self, filename, with_adef=False, rect_lon="default"): """Initialize by mocking test data for testing the SEVIRI L2 BUFR reader.""" import eccodes as ec from satpy.readers.seviri_l2_bufr import SeviriL2BufrFileHandler - self.buf1 = ec.codes_bufr_new_from_samples('BUFR4_local_satellite') - ec.codes_set(self.buf1, 'unpack', 1) + self.buf1 = ec.codes_bufr_new_from_samples("BUFR4_local_satellite") + ec.codes_set(self.buf1, "unpack", 1) # write the bufr test data twice as we want to read in and then concatenate the data in the reader # 55 id corresponds to METEOSAT 8` - ec.codes_set(self.buf1, 'satelliteIdentifier', 56) - ec.codes_set_array(self.buf1, '#1#latitude', LAT) - ec.codes_set_array(self.buf1, '#1#latitude', LAT) - ec.codes_set_array(self.buf1, '#1#longitude', LON) - ec.codes_set_array(self.buf1, '#1#longitude', LON) - ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA) - ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA) + ec.codes_set(self.buf1, "satelliteIdentifier", 56) + ec.codes_set_array(self.buf1, "#1#latitude", LAT) + ec.codes_set_array(self.buf1, "#1#latitude", LAT) + ec.codes_set_array(self.buf1, "#1#longitude", LON) + ec.codes_set_array(self.buf1, "#1#longitude", LON) + ec.codes_set_array(self.buf1, "#1#brightnessTemperature", DATA) + ec.codes_set_array(self.buf1, "#1#brightnessTemperature", DATA) self.m = mock.mock_open() # only our offline product contain MPEF product headers so we get the metadata from there - if ('BUFRProd' in filename): - with mock.patch('satpy.readers.seviri_l2_bufr.np.fromfile') as fromfile: + if ("BUFRProd" in filename): + with mock.patch("satpy.readers.seviri_l2_bufr.np.fromfile") as fromfile: fromfile.return_value = MPEF_PRODUCT_HEADER - with mock.patch('satpy.readers.seviri_l2_bufr.recarray2dict') as recarray2dict: + with mock.patch("satpy.readers.seviri_l2_bufr.recarray2dict") as recarray2dict: recarray2dict.side_effect = (lambda x: x) self.fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO2, FILETYPE_INFO, with_area_definition=with_adef, rectification_longitude=rect_lon) @@ -148,13 +148,13 @@ def __init__(self, filename, with_adef=False, rect_lon='default'): else: # No Mpef Header so we get the metadata from the BUFR messages - with mock.patch('satpy.readers.seviri_l2_bufr.open', self.m, create=True): - with mock.patch('eccodes.codes_bufr_new_from_file', + with mock.patch("satpy.readers.seviri_l2_bufr.open", self.m, create=True): + with mock.patch("eccodes.codes_bufr_new_from_file", side_effect=[self.buf1, None, self.buf1, None, self.buf1, None]) as ec1: ec1.return_value = ec1.side_effect - with mock.patch('eccodes.codes_set') as ec2: + with mock.patch("eccodes.codes_set") as ec2: ec2.return_value = 1 - with mock.patch('eccodes.codes_release') as ec5: + with mock.patch("eccodes.codes_release") as ec5: ec5.return_value = 1 self.fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO, FILETYPE_INFO, with_area_definition=with_adef, @@ -162,15 +162,15 @@ def __init__(self, filename, with_adef=False, rect_lon='default'): def get_data(self, dataset_info): """Read data from mock file.""" - with mock.patch('satpy.readers.seviri_l2_bufr.open', self.m, create=True): - with 
mock.patch('eccodes.codes_bufr_new_from_file', + with mock.patch("satpy.readers.seviri_l2_bufr.open", self.m, create=True): + with mock.patch("eccodes.codes_bufr_new_from_file", side_effect=[self.buf1, self.buf1, None]) as ec1: ec1.return_value = ec1.side_effect - with mock.patch('eccodes.codes_set') as ec2: + with mock.patch("eccodes.codes_set") as ec2: ec2.return_value = 1 - with mock.patch('eccodes.codes_release') as ec5: + with mock.patch("eccodes.codes_release") as ec5: ec5.return_value = 1 - z = self.fh.get_dataset(make_dataid(name=dataset_info['name'], resolution=48000), dataset_info) + z = self.fh.get_dataset(make_dataid(name=dataset_info["name"], resolution=48000), dataset_info) return z @@ -193,9 +193,9 @@ def test_attributes_with_swath_definition(input_file): """Test correctness of dataset attributes with data loaded with a SwathDefinition (default behaviour).""" bufr_obj = SeviriL2BufrData(input_file) z = bufr_obj.get_data(DATASET_INFO) - assert z.attrs['platform_name'] == DATASET_ATTRS['platform_name'] - assert z.attrs['ssp_lon'] == DATASET_ATTRS['ssp_lon'] - assert z.attrs['seg_size'] == DATASET_ATTRS['seg_size'] + assert z.attrs["platform_name"] == DATASET_ATTRS["platform_name"] + assert z.attrs["ssp_lon"] == DATASET_ATTRS["ssp_lon"] + assert z.attrs["seg_size"] == DATASET_ATTRS["seg_size"] @staticmethod def test_attributes_with_area_definition(input_file): @@ -204,9 +204,9 @@ def test_attributes_with_area_definition(input_file): _ = bufr_obj.get_data(DATASET_INFO_LAT) # We need to load the lat/lon data in order to _ = bufr_obj.get_data(DATASET_INFO_LON) # populate the file handler with these data z = bufr_obj.get_data(DATASET_INFO) - assert z.attrs['platform_name'] == DATASET_ATTRS['platform_name'] - assert z.attrs['ssp_lon'] == DATASET_ATTRS['ssp_lon'] - assert z.attrs['seg_size'] == DATASET_ATTRS['seg_size'] + assert z.attrs["platform_name"] == DATASET_ATTRS["platform_name"] + assert z.attrs["ssp_lon"] == DATASET_ATTRS["ssp_lon"] + assert z.attrs["seg_size"] == DATASET_ATTRS["seg_size"] @staticmethod def test_data_with_swath_definition(input_file): @@ -242,7 +242,7 @@ def test_data_with_area_definition(self, input_file): # Test that the correct AreaDefinition is identified for products with 3 pixel segements bufr_obj.fh.seg_size = 3 - ad_ext = bufr_obj.fh._construct_area_def(make_dataid(name='dummmy', resolution=9000)) + ad_ext = bufr_obj.fh._construct_area_def(make_dataid(name="dummmy", resolution=9000)) assert ad_ext == AREA_DEF_EXT def test_data_with_rect_lon(self, input_file): @@ -260,14 +260,14 @@ def test_data_with_rect_lon(self, input_file): class SeviriL2AMVBufrData: """Mock SEVIRI L2 AMV BUFR data.""" - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") def __init__(self, filename): """Initialize by mocking test data for testing the SEVIRI L2 BUFR reader.""" from satpy.readers.seviri_l2_bufr import SeviriL2BufrFileHandler - with mock.patch('satpy.readers.seviri_l2_bufr.np.fromfile'): + with mock.patch("satpy.readers.seviri_l2_bufr.np.fromfile"): self.fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO2, - filetype_info={'file_type': 'seviri_l2_bufr_amv'}, + filetype_info={"file_type": "seviri_l2_bufr_amv"}, with_area_definition=True) @@ -277,5 +277,5 @@ class TestSeviriL2AMVBufrReader: @staticmethod def test_amv_with_area_def(): """Test that AMV data can not be loaded with an area definition.""" - bufr_obj = 
SeviriL2AMVBufrData('AMVBUFRProd_20201110124500Z_00_OMPEFS04_MET11_FES_E0000') + bufr_obj = SeviriL2AMVBufrData("AMVBUFRProd_20201110124500Z_00_OMPEFS04_MET11_FES_E0000") assert bufr_obj.fh.with_adef is False diff --git a/satpy/tests/reader_tests/test_seviri_l2_grib.py b/satpy/tests/reader_tests/test_seviri_l2_grib.py index faee3f9bdb..d3b40d6caa 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_grib.py +++ b/satpy/tests/reader_tests/test_seviri_l2_grib.py @@ -29,17 +29,17 @@ # Dictionary to be used as fake GRIB message FAKE_MESSAGE = { - 'longitudeOfSubSatellitePointInDegrees': 9.5, - 'dataDate': 20191020, - 'dataTime': 1745, - 'Nx': 1000, - 'Ny': 1200, - 'earthMajorAxis': 6400., - 'earthMinorAxis': 6300., - 'NrInRadiusOfEarth': 6., - 'XpInGridLengths': 500, - 'parameterNumber': 30, - 'missingValue': 9999, + "longitudeOfSubSatellitePointInDegrees": 9.5, + "dataDate": 20191020, + "dataTime": 1745, + "Nx": 1000, + "Ny": 1200, + "earthMajorAxis": 6400., + "earthMinorAxis": 6300., + "NrInRadiusOfEarth": 6., + "XpInGridLengths": 500, + "parameterNumber": 30, + "missingValue": 9999, } # List to be used as fake GID source @@ -49,7 +49,7 @@ class Test_SeviriL2GribFileHandler(unittest.TestCase): """Test the SeviriL2GribFileHandler reader.""" - @mock.patch('satpy.readers.seviri_l2_grib.ec') + @mock.patch("satpy.readers.seviri_l2_grib.ec") def setUp(self, ec_): """Set up the test by creating a mocked eccodes library.""" fake_gid_generator = (i for i in FAKE_GID) @@ -58,9 +58,9 @@ def setUp(self, ec_): ec_.codes_get_values.return_value = np.ones(1000*1200) self.ec_ = ec_ - @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows") - @mock.patch('satpy.readers.seviri_l2_grib.xr') - @mock.patch('satpy.readers.seviri_l2_grib.da') + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") + @mock.patch("satpy.readers.seviri_l2_grib.xr") + @mock.patch("satpy.readers.seviri_l2_grib.da") def test_data_reading(self, da_, xr_): """Test the reading of data from the product.""" from satpy.readers.seviri_l2_grib import REPEAT_CYCLE_DURATION, SeviriL2GribFileHandler @@ -68,18 +68,18 @@ def test_data_reading(self, da_, xr_): CHUNK_SIZE = get_legacy_chunk_size() with mock.patch("builtins.open", mock.mock_open()) as mock_file: - with mock.patch('satpy.readers.seviri_l2_grib.ec', self.ec_): + with mock.patch("satpy.readers.seviri_l2_grib.ec", self.ec_): self.reader = SeviriL2GribFileHandler( - filename='test.grib', + filename="test.grib", filename_info={ - 'spacecraft': 'MET11', - 'start_time': datetime.datetime(year=2020, month=10, day=20, + "spacecraft": "MET11", + "start_time": datetime.datetime(year=2020, month=10, day=20, hour=19, minute=45, second=0) }, filetype_info={} ) - dataset_id = make_dataid(name='dummmy', resolution=3000) + dataset_id = make_dataid(name="dummmy", resolution=3000) # Checks that the codes_grib_multi_support_on function has been called self.ec_.codes_grib_multi_support_on.assert_called() @@ -91,15 +91,14 @@ def test_data_reading(self, da_, xr_): self.ec_.codes_release.reset_mock() # Checks the correct execution of the get_dataset function with a valid parameter_number - valid_dataset = self.reader.get_dataset(dataset_id, {'parameter_number': 30}) + valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) # Checks the correct file open call - mock_file.assert_called_with('test.grib', 'rb') + mock_file.assert_called_with("test.grib", "rb") # Checks that the dataset has been created as a DataArray object - 
self.assertEqual(valid_dataset._extract_mock_name(), 'xr.DataArray()') + assert valid_dataset._extract_mock_name() == "xr.DataArray()" # Checks that codes_release has been called after each codes_grib_new_from_file call # (except after the last one which has returned a None) - self.assertEqual(self.ec_.codes_grib_new_from_file.call_count, - self.ec_.codes_release.call_count + 1) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 # Restarts the id generator and clears the call history fake_gid_generator = (i for i in FAKE_GID) @@ -108,77 +107,76 @@ def test_data_reading(self, da_, xr_): self.ec_.codes_release.reset_mock() # Checks the correct execution of the get_dataset function with an invalid parameter_number - invalid_dataset = self.reader.get_dataset(dataset_id, {'parameter_number': 50}) + invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) # Checks that the function returns None - self.assertEqual(invalid_dataset, None) + assert invalid_dataset is None # Checks that codes_release has been called after each codes_grib_new_from_file call # (except after the last one which has returned a None) - self.assertEqual(self.ec_.codes_grib_new_from_file.call_count, - self.ec_.codes_release.call_count + 1) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 # Checks the basic data reading - self.assertEqual(REPEAT_CYCLE_DURATION, 15) + assert REPEAT_CYCLE_DURATION == 15 # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = self.reader._get_attributes() expected_attributes = { - 'orbital_parameters': { - 'projection_longitude': 9.5 + "orbital_parameters": { + "projection_longitude": 9.5 }, - 'sensor': 'seviri', - 'platform_name': 'Meteosat-11' + "sensor": "seviri", + "platform_name": "Meteosat-11" } - self.assertEqual(attributes, expected_attributes) + assert attributes == expected_attributes # Checks the reading of an array from the message self.reader._get_xarray_from_msg(0) # Checks that dask.array has been called with the correct arguments name, args, kwargs = da_.mock_calls[0] - self.assertTrue(np.all(args[0] == np.ones((1200, 1000)))) - self.assertEqual(args[1], CHUNK_SIZE) + assert np.all(args[0] == np.ones((1200, 1000))) + assert args[1] == CHUNK_SIZE # Checks that xarray.DataArray has been called with the correct arguments name, args, kwargs = xr_.mock_calls[0] - self.assertEqual(kwargs['dims'], ('y', 'x')) + assert kwargs["dims"] == ("y", "x") # Checks the correct execution of the _get_proj_area function pdict, area_dict = self.reader._get_proj_area(0) expected_pdict = { - 'a': 6400000., - 'b': 6300000., - 'h': 32000000., - 'ssp_lon': 9.5, - 'nlines': 1000, - 'ncols': 1200, - 'a_name': 'msg_seviri_rss_3km', - 'a_desc': 'MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution', - 'p_id': '', + "a": 6400000., + "b": 6300000., + "h": 32000000., + "ssp_lon": 9.5, + "nlines": 1000, + "ncols": 1200, + "a_name": "msg_seviri_rss_3km", + "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution", + "p_id": "", } - self.assertEqual(pdict, expected_pdict) + assert pdict == expected_pdict expected_area_dict = { - 'center_point': 500, - 'north': 1200, - 'east': 1, - 'west': 1000, - 'south': 1, + "center_point": 500, + "north": 1200, + "east": 1, + "west": 1000, + "south": 1, } - self.assertEqual(area_dict, expected_area_dict) + assert area_dict == expected_area_dict # Checks the correct 
execution of the get_area_def function - with mock.patch('satpy.readers.seviri_l2_grib.calculate_area_extent', - mock.Mock(name='calculate_area_extent')) as cae: - with mock.patch('satpy.readers.seviri_l2_grib.get_area_definition', mock.Mock()) as gad: - dataset_id = make_dataid(name='dummmy', resolution=400.) + with mock.patch("satpy.readers.seviri_l2_grib.calculate_area_extent", + mock.Mock(name="calculate_area_extent")) as cae: + with mock.patch("satpy.readers.seviri_l2_grib.get_area_definition", mock.Mock()) as gad: + dataset_id = make_dataid(name="dummmy", resolution=400.) self.reader.get_area_def(dataset_id) # Asserts that calculate_area_extent has been called with the correct arguments - expected_args = ({'center_point': 500, 'east': 1, 'west': 1000, 'south': 1, 'north': 1200, - 'column_step': 400., 'line_step': 400.},) + expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200, + "column_step": 400., "line_step": 400.},) name, args, kwargs = cae.mock_calls[0] - self.assertEqual(args, expected_args) + assert args == expected_args # Asserts that get_area_definition has been called with the correct arguments name, args, kwargs = gad.mock_calls[0] - self.assertEqual(args[0], expected_pdict) + assert args[0] == expected_pdict # The second argument must be the return result of calculate_area_extent - self.assertEqual(args[1]._extract_mock_name(), 'calculate_area_extent()') + assert args[1]._extract_mock_name() == "calculate_area_extent()" diff --git a/satpy/tests/reader_tests/test_slstr_l1b.py b/satpy/tests/reader_tests/test_slstr_l1b.py index 9f516b4cde..63a43c9c79 100644 --- a/satpy/tests/reader_tests/test_slstr_l1b.py +++ b/satpy/tests/reader_tests/test_slstr_l1b.py @@ -27,40 +27,40 @@ from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange from satpy.readers.slstr_l1b import NCSLSTR1B, NCSLSTRAngles, NCSLSTRFlag, NCSLSTRGeo -local_id_keys_config = {'name': { - 'required': True, +local_id_keys_config = {"name": { + "required": True, }, - 'wavelength': { - 'type': WavelengthRange, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "resolution": None, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ] }, - 'stripe': { - 'enum': [ - 'a', - 'b', - 'c', - 'i', - 'f', + "stripe": { + "enum": [ + "a", + "b", + "c", + "i", + "f", ] }, - 'view': { - 'enum': [ - 'nadir', - 'oblique', + "view": { + "enum": [ + "nadir", + "oblique", ] }, - 'modifiers': { - 'required': True, - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "required": True, + "default": ModifierTuple(), + "type": ModifierTuple, }, } @@ -68,7 +68,7 @@ class TestSLSTRL1B(unittest.TestCase): """Common setup for SLSTR_L1B tests.""" - @mock.patch('satpy.readers.slstr_l1b.xr') + @mock.patch("satpy.readers.slstr_l1b.xr") def setUp(self, xr_): """Create a fake dataset using the given radiance data.""" self.base_data = np.array(([1., 2., 3.], [4., 5., 6.])) @@ -77,34 +77,34 @@ def setUp(self, xr_): self.end_time = "2020-05-10T12:06:18.012Z" self.rad = xr.DataArray( self.base_data, - dims=('columns', 'rows'), - attrs={'scale_factor': 1.0, 'add_offset': 0.0, - '_FillValue': -32768, 'units': 'mW.m-2.sr-1.nm-1', + dims=("columns", "rows"), + attrs={"scale_factor": 1.0, "add_offset": 0.0, + "_FillValue": -32768, "units": "mW.m-2.sr-1.nm-1", } ) det = xr.DataArray( self.base_data, - dims=('columns', 
'rows'), - attrs={'scale_factor': 1.0, 'add_offset': 0.0, - '_FillValue': 255, + dims=("columns", "rows"), + attrs={"scale_factor": 1.0, "add_offset": 0.0, + "_FillValue": 255, } ) self.fake_dataset = xr.Dataset( data_vars={ - 'S5_radiance_an': self.rad, - 'S9_BT_ao': self.rad, - 'foo_radiance_an': self.rad, - 'S5_solar_irradiances': self.rad, - 'geometry_tn': self.rad, - 'latitude_an': self.rad, - 'x_tx': self.rad, - 'y_tx': self.rad, - 'x_in': self.rad, - 'y_in': self.rad, - 'x_an': self.rad, - 'y_an': self.rad, - 'flags_an': self.rad, - 'detector_an': det, + "S5_radiance_an": self.rad, + "S9_BT_ao": self.rad, + "foo_radiance_an": self.rad, + "S5_solar_irradiances": self.rad, + "geometry_tn": self.rad, + "latitude_an": self.rad, + "x_tx": self.rad, + "y_tx": self.rad, + "x_in": self.rad, + "y_in": self.rad, + "x_an": self.rad, + "y_an": self.rad, + "flags_an": self.rad, + "detector_an": det, }, attrs={ "start_time": self.start_time, @@ -129,122 +129,122 @@ def ev(foo_x, foo_y): """Fake function to return interpolated data.""" return np.zeros((3, 2)) - @mock.patch('satpy.readers.slstr_l1b.xr') - @mock.patch('scipy.interpolate.RectBivariateSpline') + @mock.patch("satpy.readers.slstr_l1b.xr") + @mock.patch("scipy.interpolate.RectBivariateSpline") def test_instantiate(self, bvs_, xr_): """Test initialization of file handlers.""" bvs_.return_value = self.FakeSpl xr_.open_dataset.return_value = self.fake_dataset good_start = datetime.strptime(self.start_time, - '%Y-%m-%dT%H:%M:%S.%fZ') + "%Y-%m-%dT%H:%M:%S.%fZ") good_end = datetime.strptime(self.end_time, - '%Y-%m-%dT%H:%M:%S.%fZ') + "%Y-%m-%dT%H:%M:%S.%fZ") - ds_id = make_dataid(name='foo', calibration='radiance', - stripe='a', view='nadir') - ds_id_500 = make_dataid(name='foo', calibration='radiance', - stripe='a', view='nadir', resolution=500) - filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', - 'start_time': 0, 'end_time': 0, - 'stripe': 'a', 'view': 'n'} - test = NCSLSTR1B('somedir/S1_radiance_an.nc', filename_info, 'c') - assert test.view == 'nadir' - assert test.stripe == 'a' - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'})) - self.assertEqual(test.start_time, good_start) - self.assertEqual(test.end_time, good_end) + ds_id = make_dataid(name="foo", calibration="radiance", + stripe="a", view="nadir") + ds_id_500 = make_dataid(name="foo", calibration="radiance", + stripe="a", view="nadir", resolution=500) + filename_info = {"mission_id": "S3A", "dataset_name": "foo", + "start_time": 0, "end_time": 0, + "stripe": "a", "view": "n"} + test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") + assert test.view == "nadir" + assert test.stripe == "a" + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) + assert test.start_time == good_start + assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() - filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', - 'start_time': 0, 'end_time': 0, - 'stripe': 'c', 'view': 'o'} - test = NCSLSTR1B('somedir/S1_radiance_co.nc', filename_info, 'c') - assert test.view == 'oblique' - assert test.stripe == 'c' - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'})) - self.assertEqual(test.start_time, good_start) - self.assertEqual(test.end_time, good_end) + filename_info = {"mission_id": "S3A", "dataset_name": "foo", + "start_time": 0, "end_time": 0, + "stripe": "c", "view": "o"} + test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c") + assert test.view == "oblique" + assert 
test.stripe == "c" + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) + assert test.start_time == good_start + assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() - filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', - 'start_time': 0, 'end_time': 0, - 'stripe': 'a', 'view': 'n'} - test = NCSLSTRGeo('somedir/geometry_an.nc', filename_info, 'c') - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'latitude_{stripe:1s}{view:1s}'})) - self.assertEqual(test.start_time, good_start) - self.assertEqual(test.end_time, good_end) + filename_info = {"mission_id": "S3A", "dataset_name": "foo", + "start_time": 0, "end_time": 0, + "stripe": "a", "view": "n"} + test = NCSLSTRGeo("somedir/geometry_an.nc", filename_info, "c") + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "latitude_{stripe:1s}{view:1s}"})) + assert test.start_time == good_start + assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() - test = NCSLSTRFlag('somedir/S1_radiance_an.nc', filename_info, 'c') - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'flags_{stripe:1s}{view:1s}'})) - assert test.view == 'nadir' - assert test.stripe == 'a' - self.assertEqual(test.start_time, good_start) - self.assertEqual(test.end_time, good_end) + test = NCSLSTRFlag("somedir/S1_radiance_an.nc", filename_info, "c") + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "flags_{stripe:1s}{view:1s}"})) + assert test.view == "nadir" + assert test.stripe == "a" + assert test.start_time == good_start + assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() - test = NCSLSTRAngles('somedir/S1_radiance_an.nc', filename_info, 'c') - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'geometry_t{view:1s}'})) - self.assertEqual(test.start_time, good_start) - self.assertEqual(test.end_time, good_end) + test = NCSLSTRAngles("somedir/S1_radiance_an.nc", filename_info, "c") + test.get_dataset(ds_id, dict(filename_info, **{"file_key": "geometry_t{view:1s}"})) + assert test.start_time == good_start + assert test.end_time == good_end xr_.open_dataset.assert_called() xr_.open_dataset.reset_mock() - test.get_dataset(ds_id_500, dict(filename_info, **{'file_key': 'geometry_t{view:1s}'})) + test.get_dataset(ds_id_500, dict(filename_info, **{"file_key": "geometry_t{view:1s}"})) class TestSLSTRCalibration(TestSLSTRL1B): """Test the implementation of the calibration factors.""" - @mock.patch('satpy.readers.slstr_l1b.xr') + @mock.patch("satpy.readers.slstr_l1b.xr") def test_radiance_calibration(self, xr_): """Test radiance calibration steps.""" from satpy.readers.slstr_l1b import CHANCALIB_FACTORS xr_.open_dataset.return_value = self.fake_dataset - ds_id = make_dataid(name='foo', calibration='radiance', - stripe='a', view='nadir') - filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo', - 'start_time': 0, 'end_time': 0, - 'stripe': 'a', 'view': 'n'} + ds_id = make_dataid(name="foo", calibration="radiance", + stripe="a", view="nadir") + filename_info = {"mission_id": "S3A", "dataset_name": "foo", + "start_time": 0, "end_time": 0, + "stripe": "a", "view": "n"} - test = NCSLSTR1B('somedir/S1_radiance_co.nc', filename_info, 'c') + test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c") # Check warning is raised if we don't have calibration with warnings.catch_warnings(record=True) as w: - test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'})) + 
test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) assert issubclass(w[-1].category, UserWarning) # Check user calibration is used correctly - test = NCSLSTR1B('somedir/S1_radiance_co.nc', filename_info, 'c', - user_calibration={'foo_nadir': 0.4}) - data = test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'})) + test = NCSLSTR1B("somedir/S1_radiance_co.nc", filename_info, "c", + user_calibration={"foo_nadir": 0.4}) + data = test.get_dataset(ds_id, dict(filename_info, **{"file_key": "foo"})) np.testing.assert_allclose(data.values, self.base_data * 0.4) # Check internal calibration is used correctly - ds_id = make_dataid(name='S5', calibration='radiance', stripe='a', view='nadir') - filename_info['dataset_name'] = 'S5' - test = NCSLSTR1B('somedir/S1_radiance_an.nc', filename_info, 'c') - data = test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'S5'})) + ds_id = make_dataid(name="S5", calibration="radiance", stripe="a", view="nadir") + filename_info["dataset_name"] = "S5" + test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") + data = test.get_dataset(ds_id, dict(filename_info, **{"file_key": "S5"})) np.testing.assert_allclose(data.values, - self.base_data * CHANCALIB_FACTORS['S5_nadir']) + self.base_data * CHANCALIB_FACTORS["S5_nadir"]) - @mock.patch('satpy.readers.slstr_l1b.xr') - @mock.patch('satpy.readers.slstr_l1b.da') + @mock.patch("satpy.readers.slstr_l1b.xr") + @mock.patch("satpy.readers.slstr_l1b.da") def test_reflectance_calibration(self, da_, xr_): """Test reflectance calibration.""" xr_.open_dataset.return_value = self.fake_dataset da_.map_blocks.return_value = self.rad / 100. - filename_info = {'mission_id': 'S3A', 'dataset_name': 'S5', - 'start_time': 0, 'end_time': 0, - 'stripe': 'a', 'view': 'n'} - ds_id = make_dataid(name='S5', calibration='reflectance', stripe='a', view='nadir') - test = NCSLSTR1B('somedir/S1_radiance_an.nc', filename_info, 'c') - data = test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'S5'})) - self.assertEqual(data.units, '%') + filename_info = {"mission_id": "S3A", "dataset_name": "S5", + "start_time": 0, "end_time": 0, + "stripe": "a", "view": "n"} + ds_id = make_dataid(name="S5", calibration="reflectance", stripe="a", view="nadir") + test = NCSLSTR1B("somedir/S1_radiance_an.nc", filename_info, "c") + data = test.get_dataset(ds_id, dict(filename_info, **{"file_key": "S5"})) + assert data.units == "%" np.testing.assert_allclose(data.values, self.rad * np.pi) def test_cal_rad(self): diff --git a/satpy/tests/reader_tests/test_smos_l2_wind.py b/satpy/tests/reader_tests/test_smos_l2_wind.py index 731cd64181..519030447b 100644 --- a/satpy/tests/reader_tests/test_smos_l2_wind.py +++ b/satpy/tests/reader_tests/test_smos_l2_wind.py @@ -35,39 +35,39 @@ class FakeNetCDF4FileHandlerSMOSL2WIND(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" from xarray import DataArray - dt_s = filename_info.get('start_time', datetime(2020, 4, 22, 12, 0, 0)) - dt_e = filename_info.get('end_time', datetime(2020, 4, 22, 12, 0, 0)) + dt_s = filename_info.get("start_time", datetime(2020, 4, 22, 12, 0, 0)) + dt_e = filename_info.get("end_time", datetime(2020, 4, 22, 12, 0, 0)) - if filetype_info['file_type'] == 'smos_l2_wind': + if filetype_info["file_type"] == "smos_l2_wind": file_content = { - '/attr/time_coverage_start': dt_s.strftime('%Y-%m-%dT%H:%M:%S Z'), - '/attr/time_coverage_end': dt_e.strftime('%Y-%m-%dT%H:%M:%S Z'), - 
'/attr/platform_shortname': 'SM', - '/attr/platform': 'SMOS', - '/attr/instrument': 'MIRAS', - '/attr/processing_level': 'L2', - '/attr/geospatial_bounds_vertical_crs': 'EPSG:4623', + "/attr/time_coverage_start": dt_s.strftime("%Y-%m-%dT%H:%M:%S Z"), + "/attr/time_coverage_end": dt_e.strftime("%Y-%m-%dT%H:%M:%S Z"), + "/attr/platform_shortname": "SM", + "/attr/platform": "SMOS", + "/attr/instrument": "MIRAS", + "/attr/processing_level": "L2", + "/attr/geospatial_bounds_vertical_crs": "EPSG:4623", } - file_content['lat'] = np.arange(-90., 90.25, 0.25) - file_content['lat/shape'] = (len(file_content['lat']),) - file_content['lat'] = DataArray(file_content['lat'], dims=('lat')) - file_content['lat'].attrs['_FillValue'] = -999.0 - - file_content['lon'] = np.arange(0., 360., 0.25) - file_content['lon/shape'] = (len(file_content['lon']),) - file_content['lon'] = DataArray(file_content['lon'], dims=('lon')) - file_content['lon'].attrs['_FillValue'] = -999.0 - - file_content['wind_speed'] = np.ndarray(shape=(1, # Time dimension - len(file_content['lat']), - len(file_content['lon']))) - file_content['wind_speed/shape'] = (1, - len(file_content['lat']), - len(file_content['lon'])) - file_content['wind_speed'] = DataArray(file_content['wind_speed'], dims=('time', 'lat', 'lon'), - coords=[[1], file_content['lat'], file_content['lon']]) - file_content['wind_speed'].attrs['_FillValue'] = -999.0 + file_content["lat"] = np.arange(-90., 90.25, 0.25) + file_content["lat/shape"] = (len(file_content["lat"]),) + file_content["lat"] = DataArray(file_content["lat"], dims=("lat")) + file_content["lat"].attrs["_FillValue"] = -999.0 + + file_content["lon"] = np.arange(0., 360., 0.25) + file_content["lon/shape"] = (len(file_content["lon"]),) + file_content["lon"] = DataArray(file_content["lon"], dims=("lon")) + file_content["lon"].attrs["_FillValue"] = -999.0 + + file_content["wind_speed"] = np.ndarray(shape=(1, # Time dimension + len(file_content["lat"]), + len(file_content["lon"]))) + file_content["wind_speed/shape"] = (1, + len(file_content["lat"]), + len(file_content["lon"])) + file_content["wind_speed"] = DataArray(file_content["wind_speed"], dims=("time", "lat", "lon"), + coords=[[1], file_content["lat"], file_content["lon"]]) + file_content["wind_speed"].attrs["_FillValue"] = -999.0 else: raise AssertionError() @@ -84,9 +84,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(SMOSL2WINDFileHandler, '__bases__', (FakeNetCDF4FileHandlerSMOSL2WIND,)) + self.p = mock.patch.object(SMOSL2WINDFileHandler, "__bases__", (FakeNetCDF4FileHandlerSMOSL2WIND,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -99,92 +99,92 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', + "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers 
def test_load_wind_speed(self): """Load wind_speed dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.smos_l2_wind.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.smos_l2_wind.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', + "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['wind_speed']) - self.assertEqual(len(ds), 1) + ds = r.load(["wind_speed"]) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'SM') - self.assertEqual(d.attrs['sensor'], 'MIRAS') - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertIn('y', d.dims) - self.assertIn('x', d.dims) - self.assertEqual(d.shape, (719, 1440)) - self.assertEqual(d.y[0].data, -89.75) - self.assertEqual(d.y[d.shape[0] - 1].data, 89.75) + assert d.attrs["platform_shortname"] == "SM" + assert d.attrs["sensor"] == "MIRAS" + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert "y" in d.dims + assert "x" in d.dims + assert d.shape == (719, 1440) + assert d.y[0].data == -89.75 + assert d.y[d.shape[0] - 1].data == 89.75 def test_load_lat(self): """Load lat dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.smos_l2_wind.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.smos_l2_wind.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', + "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['lat']) - self.assertEqual(len(ds), 1) + ds = r.load(["lat"]) + assert len(ds) == 1 for d in ds.values(): - self.assertIn('y', d.dims) - self.assertEqual(d.shape, (719,)) - self.assertEqual(d.data[0], -89.75) - self.assertEqual(d.data[d.shape[0] - 1], 89.75) + assert "y" in d.dims + assert d.shape == (719,) + assert d.data[0] == -89.75 + assert d.data[d.shape[0] - 1] == 89.75 def test_load_lon(self): """Load lon dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.smos_l2_wind.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.smos_l2_wind.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', + "SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['lon']) - self.assertEqual(len(ds), 1) + ds = r.load(["lon"]) + assert len(ds) == 1 for d in ds.values(): - self.assertIn('x', d.dims) - self.assertEqual(d.shape, (1440,)) - self.assertEqual(d.data[0], -180.0) - self.assertEqual(d.data[d.shape[0] - 1], 179.75) + assert "x" in d.dims + assert d.shape == (1440,) + assert d.data[0] == -180.0 + assert d.data[d.shape[0] - 1] == 179.75 def test_adjust_lon(self): """Load adjust longitude dataset.""" from xarray import DataArray from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler - smos_l2_wind_fh = SMOSL2WINDFileHandler('SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', - {}, filetype_info={'file_type': 'smos_l2_wind'}) - data = DataArray(np.arange(0., 360., 0.25), dims=('lon')) + smos_l2_wind_fh = 
SMOSL2WINDFileHandler("SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", + {}, filetype_info={"file_type": "smos_l2_wind"}) + data = DataArray(np.arange(0., 360., 0.25), dims=("lon")) adjusted = smos_l2_wind_fh._adjust_lon_coord(data) expected = DataArray(np.concatenate((np.arange(0, 180., 0.25), np.arange(-180.0, 0, 0.25))), - dims=('lon')) - self.assertEqual(adjusted.data.tolist(), expected.data.tolist()) + dims=("lon")) + assert adjusted.data.tolist() == expected.data.tolist() def test_roll_dataset(self): """Load roll of dataset along the lon coordinate.""" from xarray import DataArray from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler - smos_l2_wind_fh = SMOSL2WINDFileHandler('SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc', - {}, filetype_info={'file_type': 'smos_l2_wind'}) - data = DataArray(np.arange(0., 360., 0.25), dims=('lon')) + smos_l2_wind_fh = SMOSL2WINDFileHandler("SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc", + {}, filetype_info={"file_type": "smos_l2_wind"}) + data = DataArray(np.arange(0., 360., 0.25), dims=("lon")) data = smos_l2_wind_fh._adjust_lon_coord(data) adjusted = smos_l2_wind_fh._roll_dataset_lon_coord(data) expected = np.arange(-180., 180., 0.25) - self.assertEqual(adjusted.data.tolist(), expected.tolist()) + assert adjusted.data.tolist() == expected.tolist() diff --git a/satpy/tests/reader_tests/test_tropomi_l2.py b/satpy/tests/reader_tests/test_tropomi_l2.py index 4b6e3a8652..7305bf365c 100644 --- a/satpy/tests/reader_tests/test_tropomi_l2.py +++ b/satpy/tests/reader_tests/test_tropomi_l2.py @@ -41,41 +41,41 @@ class FakeNetCDF4FileHandlerTL2(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - dt_s = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0)) - dt_e = filename_info.get('end_time', datetime(2016, 1, 1, 12, 0, 0)) + dt_s = filename_info.get("start_time", datetime(2016, 1, 1, 12, 0, 0)) + dt_e = filename_info.get("end_time", datetime(2016, 1, 1, 12, 0, 0)) - if filetype_info['file_type'] == 'tropomi_l2': + if filetype_info["file_type"] == "tropomi_l2": file_content = { - '/attr/time_coverage_start': (dt_s+timedelta(minutes=22)).strftime('%Y-%m-%dT%H:%M:%SZ'), - '/attr/time_coverage_end': (dt_e-timedelta(minutes=22)).strftime('%Y-%m-%dT%H:%M:%SZ'), - '/attr/platform_shortname': 'S5P', - '/attr/sensor': 'TROPOMI', + "/attr/time_coverage_start": (dt_s+timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), + "/attr/time_coverage_end": (dt_e-timedelta(minutes=22)).strftime("%Y-%m-%dT%H:%M:%SZ"), + "/attr/platform_shortname": "S5P", + "/attr/sensor": "TROPOMI", } - file_content['PRODUCT/latitude'] = DEFAULT_FILE_DATA - file_content['PRODUCT/longitude'] = DEFAULT_FILE_DATA - file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds'] = DEFAULT_BOUND_DATA - file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds'] = DEFAULT_BOUND_DATA + file_content["PRODUCT/latitude"] = DEFAULT_FILE_DATA + file_content["PRODUCT/longitude"] = DEFAULT_FILE_DATA + file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds"] = DEFAULT_BOUND_DATA + file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds"] = DEFAULT_BOUND_DATA - if 'NO2' in filename: - file_content['PRODUCT/nitrogen_dioxide_total_column'] = DEFAULT_FILE_DATA - if 'SO2' in filename: - file_content['PRODUCT/sulfurdioxide_total_vertical_column'] = DEFAULT_FILE_DATA + if "NO2" in filename: + 
file_content["PRODUCT/nitrogen_dioxide_total_column"] = DEFAULT_FILE_DATA + if "SO2" in filename: + file_content["PRODUCT/sulfurdioxide_total_vertical_column"] = DEFAULT_FILE_DATA for k in list(file_content.keys()): - if not k.startswith('PRODUCT'): + if not k.startswith("PRODUCT"): continue - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE self._convert_data_content_to_dataarrays(file_content) - file_content['PRODUCT/latitude'].attrs['_FillValue'] = -999.0 - file_content['PRODUCT/longitude'].attrs['_FillValue'] = -999.0 - file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds'].attrs['_FillValue'] = -999.0 - file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds'].attrs['_FillValue'] = -999.0 - if 'NO2' in filename: - file_content['PRODUCT/nitrogen_dioxide_total_column'].attrs['_FillValue'] = -999.0 - if 'SO2' in filename: - file_content['PRODUCT/sulfurdioxide_total_vertical_column'].attrs['_FillValue'] = -999.0 + file_content["PRODUCT/latitude"].attrs["_FillValue"] = -999.0 + file_content["PRODUCT/longitude"].attrs["_FillValue"] = -999.0 + file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds"].attrs["_FillValue"] = -999.0 + file_content["PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds"].attrs["_FillValue"] = -999.0 + if "NO2" in filename: + file_content["PRODUCT/nitrogen_dioxide_total_column"].attrs["_FillValue"] = -999.0 + if "SO2" in filename: + file_content["PRODUCT/sulfurdioxide_total_vertical_column"].attrs["_FillValue"] = -999.0 else: raise NotImplementedError("Test data for file types other than " @@ -89,9 +89,9 @@ def _convert_data_content_to_dataarrays(self, file_content): for key, val in file_content.items(): if isinstance(val, np.ndarray): if 1 < val.ndim <= 2: - file_content[key] = DataArray(val, dims=('scanline', 'ground_pixel')) + file_content[key] = DataArray(val, dims=("scanline", "ground_pixel")) elif val.ndim > 2: - file_content[key] = DataArray(val, dims=('scanline', 'ground_pixel', 'corner')) + file_content[key] = DataArray(val, dims=("scanline", "ground_pixel", "corner")) else: file_content[key] = DataArray(val) @@ -105,9 +105,9 @@ def setUp(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.tropomi_l2 import TROPOMIL2FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(TROPOMIL2FileHandler, '__bases__', (FakeNetCDF4FileHandlerTL2,)) + self.p = mock.patch.object(TROPOMIL2FileHandler, "__bases__", (FakeNetCDF4FileHandlerTL2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -120,84 +120,82 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc', + "S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_no2(self): """Load NO2 dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with 
mock.patch('satpy.readers.tropomi_l2.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.tropomi_l2.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc', + "S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['nitrogen_dioxide_total_column']) - self.assertEqual(len(ds), 1) + ds = r.load(["nitrogen_dioxide_total_column"]) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'S5P') - self.assertEqual(d.attrs['sensor'], 'tropomi') - self.assertEqual(d.attrs['time_coverage_start'], datetime(2018, 7, 9, 17, 25, 34)) - self.assertEqual(d.attrs['time_coverage_end'], datetime(2018, 7, 9, 18, 23, 4)) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertIn('y', d.dims) - self.assertIn('x', d.dims) + assert d.attrs["platform_shortname"] == "S5P" + assert d.attrs["sensor"] == "tropomi" + assert d.attrs["time_coverage_start"] == datetime(2018, 7, 9, 17, 25, 34) + assert d.attrs["time_coverage_end"] == datetime(2018, 7, 9, 18, 23, 4) + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert "y" in d.dims + assert "x" in d.dims def test_load_so2(self): """Load SO2 dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.tropomi_l2.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.tropomi_l2.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'S5P_OFFL_L2__SO2____20181224T055107_20181224T073237_06198_01_010105_20181230T150634.nc', + "S5P_OFFL_L2__SO2____20181224T055107_20181224T073237_06198_01_010105_20181230T150634.nc", ]) r.create_filehandlers(loadables) - ds = r.load(['sulfurdioxide_total_vertical_column']) - self.assertEqual(len(ds), 1) + ds = r.load(["sulfurdioxide_total_vertical_column"]) + assert len(ds) == 1 for d in ds.values(): - self.assertEqual(d.attrs['platform_shortname'], 'S5P') - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) - self.assertIn('y', d.dims) - self.assertIn('x', d.dims) + assert d.attrs["platform_shortname"] == "S5P" + assert "area" in d.attrs + assert d.attrs["area"] is not None + assert "y" in d.dims + assert "x" in d.dims def test_load_bounds(self): """Load bounds dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with mock.patch('satpy.readers.tropomi_l2.netCDF4.Variable', xr.DataArray): + with mock.patch("satpy.readers.tropomi_l2.netCDF4.Variable", xr.DataArray): loadables = r.select_files_from_pathnames([ - 'S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc', + "S5P_OFFL_L2__NO2____20180709T170334_20180709T184504_03821_01_010002_20180715T184729.nc", ]) r.create_filehandlers(loadables) - keys = ['latitude_bounds', 'longitude_bounds'] + keys = ["latitude_bounds", "longitude_bounds"] ds = r.load(keys) - self.assertEqual(len(ds), 2) + assert len(ds) == 2 for key in keys: - self.assertEqual(ds[key].attrs['platform_shortname'], 'S5P') - self.assertIn('y', ds[key].dims) - self.assertIn('x', ds[key].dims) - self.assertIn('corner', ds[key].dims) + assert ds[key].attrs["platform_shortname"] == "S5P" + assert "y" in ds[key].dims + assert "x" in ds[key].dims + assert "corner" in ds[key].dims # check assembled bounds left = np.vstack([ds[key][:, :, 0], ds[key][-1:, :, 3]]) right 
= np.vstack([ds[key][:, -1:, 1], ds[key][-1:, -1:, 2]]) dest = np.hstack([left, right]) dest = xr.DataArray(dest, - dims=('y', 'x') + dims=("y", "x") ) dest.attrs = ds[key].attrs - self.assertEqual(dest.attrs['platform_shortname'], 'S5P') - self.assertIn('y', dest.dims) - self.assertIn('x', dest.dims) - self.assertEqual(DEFAULT_FILE_SHAPE[0] + 1, dest.shape[0]) - self.assertEqual(DEFAULT_FILE_SHAPE[1] + 1, dest.shape[1]) - self.assertIsNone(np.testing.assert_array_equal(dest[:-1, :-1], ds[key][:, :, 0])) - self.assertIsNone(np.testing.assert_array_equal(dest[-1, :-1], ds[key][-1, :, 3])) - self.assertIsNone(np.testing.assert_array_equal(dest[:, -1], - np.append(ds[key][:, -1, 1], ds[key][-1:, -1:, 2])) - ) + assert dest.attrs["platform_shortname"] == "S5P" + assert "y" in dest.dims + assert "x" in dest.dims + assert DEFAULT_FILE_SHAPE[0] + 1 == dest.shape[0] + assert DEFAULT_FILE_SHAPE[1] + 1 == dest.shape[1] + np.testing.assert_array_equal(dest[:-1, :-1], ds[key][:, :, 0]) + np.testing.assert_array_equal(dest[-1, :-1], ds[key][-1, :, 3]) + np.testing.assert_array_equal(dest[:, -1], np.append(ds[key][:, -1, 1], ds[key][-1:, -1:, 2])) diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py index fc38e36c88..6471159449 100644 --- a/satpy/tests/reader_tests/test_utils.py +++ b/satpy/tests/reader_tests/test_utils.py @@ -45,11 +45,11 @@ def test_lonlat_from_geos(self): lon_0 = 0 h = 35785831.00 geos_area.crs = CRS({ - 'a': 6378169.00, - 'b': 6356583.80, - 'h': h, - 'lon_0': lon_0, - 'proj': 'geos'}) + "a": 6378169.00, + "b": 6356583.80, + "h": h, + "lon_0": lon_0, + "proj": "geos"}) proj = pyproj.Proj(geos_area.crs) expected = proj(0, 0, inverse=True) @@ -77,12 +77,12 @@ def test_get_geostationary_bbox(self): geos_area = mock.MagicMock() lon_0 = 0 geos_area.crs = CRS({ - 'proj': 'geos', - 'lon_0': lon_0, - 'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, - 'units': 'm'}) + "proj": "geos", + "lon_0": lon_0, + "a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, + "units": "m"}) geos_area.area_extent = [-5500000., -5500000., 5500000., 5500000.] 
lon, lat = hf.get_geostationary_bounding_box(geos_area, 20) @@ -107,21 +107,21 @@ def test_get_geostationary_angle_extent(self): """Get max geostationary angles.""" geos_area = mock.MagicMock() proj_dict = { - 'proj': 'geos', - 'sweep': 'x', - 'lon_0': -89.5, - 'a': 6378169.00, - 'b': 6356583.80, - 'h': 35785831.00, - 'units': 'm'} + "proj": "geos", + "sweep": "x", + "lon_0": -89.5, + "a": 6378169.00, + "b": 6356583.80, + "h": 35785831.00, + "units": "m"} geos_area.crs = CRS(proj_dict) expected = (0.15185342867090912, 0.15133555510297725) np.testing.assert_allclose(expected, hf.get_geostationary_angle_extent(geos_area)) - proj_dict['a'] = 1000.0 - proj_dict['b'] = 1000.0 - proj_dict['h'] = np.sqrt(2) * 1000.0 - 1000.0 + proj_dict["a"] = 1000.0 + proj_dict["b"] = 1000.0 + proj_dict["h"] = np.sqrt(2) * 1000.0 - 1000.0 geos_area.reset_mock() geos_area.crs = CRS(proj_dict) expected = (np.deg2rad(45), np.deg2rad(45)) @@ -129,12 +129,12 @@ def test_get_geostationary_angle_extent(self): hf.get_geostationary_angle_extent(geos_area)) proj_dict = { - 'proj': 'geos', - 'sweep': 'x', - 'lon_0': -89.5, - 'ellps': 'GRS80', - 'h': 35785831.00, - 'units': 'm'} + "proj": "geos", + "sweep": "x", + "lon_0": -89.5, + "ellps": "GRS80", + "h": 35785831.00, + "units": "m"} geos_area.crs = CRS(proj_dict) expected = (0.15185277703584374, 0.15133971368991794) np.testing.assert_allclose(expected, @@ -144,15 +144,15 @@ def test_geostationary_mask(self): """Test geostationary mask.""" # Compute mask of a very elliptical earth area = pyresample.geometry.AreaDefinition( - 'FLDK', - 'Full Disk', - 'geos', - {'a': '6378169.0', - 'b': '3000000.0', - 'h': '35785831.0', - 'lon_0': '145.0', - 'proj': 'geos', - 'units': 'm'}, + "FLDK", + "Full Disk", + "geos", + {"a": "6378169.0", + "b": "3000000.0", + "h": "35785831.0", + "lon_0": "145.0", + "proj": "geos", + "units": "m"}, 101, 101, (-6498000.088960204, -6498000.088960204, @@ -162,56 +162,56 @@ def test_geostationary_mask(self): # Check results along a couple of lines # a) Horizontal - self.assertTrue(np.all(mask[50, :8] == 0)) - self.assertTrue(np.all(mask[50, 8:93] == 1)) - self.assertTrue(np.all(mask[50, 93:] == 0)) + assert np.all(mask[50, :8] == 0) + assert np.all(mask[50, 8:93] == 1) + assert np.all(mask[50, 93:] == 0) # b) Vertical - self.assertTrue(np.all(mask[:31, 50] == 0)) - self.assertTrue(np.all(mask[31:70, 50] == 1)) - self.assertTrue(np.all(mask[70:, 50] == 0)) + assert np.all(mask[:31, 50] == 0) + assert np.all(mask[31:70, 50] == 1) + assert np.all(mask[70:, 50] == 0) # c) Top left to bottom right - self.assertTrue(np.all(mask[range(33), range(33)] == 0)) - self.assertTrue(np.all(mask[range(33, 68), range(33, 68)] == 1)) - self.assertTrue(np.all(mask[range(68, 101), range(68, 101)] == 0)) + assert np.all(mask[range(33), range(33)] == 0) + assert np.all(mask[range(33, 68), range(33, 68)] == 1) + assert np.all(mask[range(68, 101), range(68, 101)] == 0) # d) Bottom left to top right - self.assertTrue(np.all(mask[range(101-1, 68-1, -1), range(33)] == 0)) - self.assertTrue(np.all(mask[range(68-1, 33-1, -1), range(33, 68)] == 1)) - self.assertTrue(np.all(mask[range(33-1, -1, -1), range(68, 101)] == 0)) + assert np.all(mask[range(101 - 1, 68 - 1, -1), range(33)] == 0) + assert np.all(mask[range(68 - 1, 33 - 1, -1), range(33, 68)] == 1) + assert np.all(mask[range(33 - 1, -1, -1), range(68, 101)] == 0) - @mock.patch('satpy.readers.utils.AreaDefinition') + @mock.patch("satpy.readers.utils.AreaDefinition") def test_sub_area(self, adef): """Sub area slicing.""" area = 
mock.MagicMock() area.pixel_size_x = 1.5 area.pixel_size_y = 1.5 area.pixel_upper_left = (0, 0) - area.area_id = 'fakeid' - area.name = 'fake name' - area.proj_id = 'fakeproj' - area.crs = 'some_crs' + area.area_id = "fakeid" + area.name = "fake name" + area.proj_id = "fakeproj" + area.crs = "some_crs" hf.get_sub_area(area, slice(1, 4), slice(0, 3)) - adef.assert_called_once_with('fakeid', 'fake name', 'fakeproj', - 'some_crs', + adef.assert_called_once_with("fakeid", "fake name", "fakeproj", + "some_crs", 3, 3, (0.75, -3.75, 5.25, 0.75)) def test_np2str(self): """Test the np2str function.""" # byte object - npbytes = np.bytes_('hej') - self.assertEqual(hf.np2str(npbytes), 'hej') + npbytes = np.bytes_("hej") + assert hf.np2str(npbytes) == "hej" # single element numpy array np_arr = np.array([npbytes]) - self.assertEqual(hf.np2str(np_arr), 'hej') + assert hf.np2str(np_arr) == "hej" # scalar numpy array np_arr = np.array(npbytes) - self.assertEqual(hf.np2str(np_arr), 'hej') + assert hf.np2str(np_arr) == "hej" # multi-element array npbytes = np.array([npbytes, npbytes]) @@ -236,51 +236,51 @@ def re(lat): return n * np.sqrt((1 - e2)**2 * np.sin(lat)**2 + np.cos(lat)**2) for lon in (0, 180, 270): - self.assertEqual(hf.get_earth_radius(lon=lon, lat=0., a=a, b=b), a) + assert hf.get_earth_radius(lon=lon, lat=0.0, a=a, b=b) == a for lat in (90, -90): - self.assertEqual(hf.get_earth_radius(lon=0., lat=lat, a=a, b=b), b) - self.assertTrue(np.isclose(hf.get_earth_radius(lon=123, lat=45., a=a, b=b), re(45.))) + assert hf.get_earth_radius(lon=0.0, lat=lat, a=a, b=b) == b + assert np.isclose(hf.get_earth_radius(lon=123, lat=45.0, a=a, b=b), re(45.0)) def test_reduce_mda(self): """Test metadata size reduction.""" - mda = {'a': 1, - 'b': np.array([1, 2, 3]), - 'c': np.array([1, 2, 3, 4]), - 'd': {'a': 1, - 'b': np.array([1, 2, 3]), - 'c': np.array([1, 2, 3, 4]), - 'd': {'a': 1, - 'b': np.array([1, 2, 3]), - 'c': np.array([1, 2, 3, 4])}}} - exp = {'a': 1, - 'b': np.array([1, 2, 3]), - 'd': {'a': 1, - 'b': np.array([1, 2, 3]), - 'd': {'a': 1, - 'b': np.array([1, 2, 3])}}} + mda = {"a": 1, + "b": np.array([1, 2, 3]), + "c": np.array([1, 2, 3, 4]), + "d": {"a": 1, + "b": np.array([1, 2, 3]), + "c": np.array([1, 2, 3, 4]), + "d": {"a": 1, + "b": np.array([1, 2, 3]), + "c": np.array([1, 2, 3, 4])}}} + exp = {"a": 1, + "b": np.array([1, 2, 3]), + "d": {"a": 1, + "b": np.array([1, 2, 3]), + "d": {"a": 1, + "b": np.array([1, 2, 3])}}} numpy.testing.assert_equal(hf.reduce_mda(mda, max_size=3), exp) # Make sure, reduce_mda() doesn't modify the original dictionary - self.assertIn('c', mda) - self.assertIn('c', mda['d']) - self.assertIn('c', mda['d']['d']) + assert "c" in mda + assert "c" in mda["d"] + assert "c" in mda["d"]["d"] - @mock.patch('satpy.readers.utils.bz2.BZ2File') - @mock.patch('satpy.readers.utils.Popen') + @mock.patch("satpy.readers.utils.bz2.BZ2File") + @mock.patch("satpy.readers.utils.Popen") def test_unzip_file(self, mock_popen, mock_bz2): """Test the bz2 file unzipping techniques.""" process_mock = mock.Mock() - attrs = {'communicate.return_value': (b'output', b'error'), - 'returncode': 0} + attrs = {"communicate.return_value": (b"output", b"error"), + "returncode": 0} process_mock.configure_mock(**attrs) mock_popen.return_value = process_mock bz2_mock = mock.MagicMock() - bz2_mock.__enter__.return_value.read.return_value = b'TEST' + bz2_mock.__enter__.return_value.read.return_value = b"TEST" mock_bz2.return_value = bz2_mock - filename = 'tester.DAT.bz2' - whichstr = 'satpy.readers.utils.which' + 
filename = "tester.DAT.bz2" + whichstr = "satpy.readers.utils.which" segment = 3 segmentstr = str(segment).zfill(2) # no pbzip2 installed with prefix @@ -294,7 +294,7 @@ def test_unzip_file(self, mock_popen, mock_bz2): os.remove(new_fname) # pbzip2 installed without prefix with mock.patch(whichstr) as whichmock: - whichmock.return_value = '/usr/bin/pbzip2' + whichmock.return_value = "/usr/bin/pbzip2" new_fname = hf.unzip_file(filename) assert mock_popen.called assert os.path.exists(new_fname) @@ -302,21 +302,21 @@ def test_unzip_file(self, mock_popen, mock_bz2): if os.path.exists(new_fname): os.remove(new_fname) - filename = 'tester.DAT' + filename = "tester.DAT" new_fname = hf.unzip_file(filename) assert new_fname is None - @mock.patch('bz2.BZ2File') + @mock.patch("bz2.BZ2File") def test_generic_open_BZ2File(self, bz2_mock): """Test the generic_open method with bz2 filename input.""" mock_bz2_open = mock.MagicMock() - mock_bz2_open.read.return_value = b'TEST' + mock_bz2_open.read.return_value = b"TEST" bz2_mock.return_value = mock_bz2_open - filename = 'tester.DAT.bz2' + filename = "tester.DAT.bz2" with hf.generic_open(filename) as file_object: data = file_object.read() - assert data == b'TEST' + assert data == b"TEST" assert mock_bz2_open.read.called @@ -328,27 +328,27 @@ def test_generic_open_FSFile_MemoryFileSystem(self): fsf = FSFile(mem_file) with hf.generic_open(fsf) as file_object: data = file_object.read() - assert data == b'TEST' + assert data == b"TEST" - @mock.patch('satpy.readers.utils.open') + @mock.patch("satpy.readers.utils.open") def test_generic_open_filename(self, open_mock): """Test the generic_open method with filename (str).""" mock_fn_open = mock.MagicMock() - mock_fn_open.read.return_value = b'TEST' + mock_fn_open.read.return_value = b"TEST" open_mock.return_value = mock_fn_open filename = "test.DAT" with hf.generic_open(filename) as file_object: data = file_object.read() - assert data == b'TEST' + assert data == b"TEST" assert mock_fn_open.read.called - @mock.patch('bz2.decompress', return_value=b'TEST_DECOMPRESSED') + @mock.patch("bz2.decompress", return_value=b"TEST_DECOMPRESSED") def test_unzip_FSFile(self, bz2_mock): """Test the FSFile bz2 file unzipping techniques.""" mock_bz2_decompress = mock.MagicMock() - mock_bz2_decompress.return_value = b'TEST_DECOMPRESSED' + mock_bz2_decompress.return_value = b"TEST_DECOMPRESSED" segment = 3 segmentstr = str(segment).zfill(2) @@ -382,14 +382,14 @@ def test_unzip_FSFile(self, bz2_mock): os.remove(new_fname) @mock.patch("os.remove") - @mock.patch("satpy.readers.utils.unzip_file", return_value='dummy.txt') + @mock.patch("satpy.readers.utils.unzip_file", return_value="dummy.txt") def test_pro_reading_gets_unzipped_file(self, fake_unzip_file, fake_remove): """Test the bz2 file unzipping context manager.""" - filename = 'dummy.txt.bz2' + filename = "dummy.txt.bz2" expected_filename = filename[:-4] with hf.unzip_context(filename) as new_filename: - self.assertEqual(new_filename, expected_filename) + assert new_filename == expected_filename fake_unzip_file.assert_called_with(filename) fake_remove.assert_called_with(expected_filename) @@ -403,24 +403,24 @@ def test_apply_rad_correction(self): def test_get_user_calibration_factors(self): """Test the retrieval of user-supplied calibration factors.""" - radcor_dict = {'WV063': {'slope': 1.015, - 'offset': -0.0556}, - 'IR108': {'slo': 1.015, - 'off': -0.0556}} + radcor_dict = {"WV063": {"slope": 1.015, + "offset": -0.0556}, + "IR108": {"slo": 1.015, + "off": -0.0556}} # Test that 
correct values are returned from the dict - slope, offset = hf.get_user_calibration_factors('WV063', radcor_dict) - self.assertEqual(slope, 1.015) - self.assertEqual(offset, -0.0556) + slope, offset = hf.get_user_calibration_factors("WV063", radcor_dict) + assert slope == 1.015 + assert offset == -0.0556 # Test that channels not present in dict return 1.0, 0.0 with self.assertWarns(UserWarning): - slope, offset = hf.get_user_calibration_factors('IR097', radcor_dict) - self.assertEqual(slope, 1.) - self.assertEqual(offset, 0.) + slope, offset = hf.get_user_calibration_factors("IR097", radcor_dict) + assert slope == 1.0 + assert offset == 0.0 # Check that incorrect dict keys throw an error with self.assertRaises(KeyError): - hf.get_user_calibration_factors('IR108', radcor_dict) + hf.get_user_calibration_factors("IR108", radcor_dict) class TestSunEarthDistanceCorrection: @@ -431,15 +431,15 @@ def setup_method(self): self.test_date = datetime(2020, 8, 15, 13, 0, 40) raw_refl = xr.DataArray(da.from_array([10., 20., 40., 1., 98., 50.]), - attrs={'start_time': self.test_date, - 'scheduled_time': self.test_date}) + attrs={"start_time": self.test_date, + "scheduled_time": self.test_date}) corr_refl = xr.DataArray(da.from_array([ 10.25484833, 20.50969667, 41.01939333, 1.02548483, 100.49751367, 51.27424167]), - attrs={'start_time': self.test_date, - 'scheduled_time': self.test_date}, + attrs={"start_time": self.test_date, + "scheduled_time": self.test_date}, ) self.raw_refl = raw_refl self.corr_refl = corr_refl @@ -448,13 +448,13 @@ def test_get_utc_time(self): """Test the retrieval of scene time from a dataset.""" # First check correct time is returned with 'start_time' tmp_array = self.raw_refl.copy() - del tmp_array.attrs['scheduled_time'] + del tmp_array.attrs["scheduled_time"] utc_time = hf.get_array_date(tmp_array, None) assert utc_time == self.test_date # Now check correct time is returned with 'scheduled_time' tmp_array = self.raw_refl.copy() - del tmp_array.attrs['start_time'] + del tmp_array.attrs["start_time"] utc_time = hf.get_array_date(tmp_array, None) assert utc_time == self.test_date @@ -466,8 +466,8 @@ def test_get_utc_time(self): # Finally, ensure error is raised if no datetime is available tmp_array = self.raw_refl.copy() - del tmp_array.attrs['scheduled_time'] - del tmp_array.attrs['start_time'] + del tmp_array.attrs["scheduled_time"] + del tmp_array.attrs["start_time"] with pytest.raises(KeyError): hf.get_array_date(tmp_array, None) @@ -475,37 +475,37 @@ def test_apply_sunearth_corr(self): """Test the correction of reflectances with sun-earth distance.""" out_refl = hf.apply_earthsun_distance_correction(self.raw_refl) np.testing.assert_allclose(out_refl, self.corr_refl) - assert out_refl.attrs['sun_earth_distance_correction_applied'] + assert out_refl.attrs["sun_earth_distance_correction_applied"] assert isinstance(out_refl.data, da.Array) def test_remove_sunearth_corr(self): """Test the removal of the sun-earth distance correction.""" out_refl = hf.remove_earthsun_distance_correction(self.corr_refl) np.testing.assert_allclose(out_refl, self.raw_refl) - assert not out_refl.attrs['sun_earth_distance_correction_applied'] + assert not out_refl.attrs["sun_earth_distance_correction_applied"] assert isinstance(out_refl.data, da.Array) -@pytest.mark.parametrize("data, filename, mode", +@pytest.mark.parametrize(("data", "filename", "mode"), [(b"Hello", "dummy.dat", "b"), ("Hello", "dummy.txt", "t")]) def test_generic_open_binary(tmp_path, data, filename, mode): """Test the bz2 file 
unzipping context manager using dummy binary data.""" dummy_data = data dummy_filename = os.fspath(tmp_path / filename) - with open(dummy_filename, 'w' + mode) as f: + with open(dummy_filename, "w" + mode) as f: f.write(dummy_data) - with hf.generic_open(dummy_filename, 'r' + mode) as f: + with hf.generic_open(dummy_filename, "r" + mode) as f: read_binary_data = f.read() assert read_binary_data == dummy_data - dummy_filename = os.fspath(tmp_path / (filename + '.bz2')) - with hf.bz2.open(dummy_filename, 'w' + mode) as f: + dummy_filename = os.fspath(tmp_path / (filename + ".bz2")) + with hf.bz2.open(dummy_filename, "w" + mode) as f: f.write(dummy_data) - with hf.generic_open(dummy_filename, 'r' + mode) as f: + with hf.generic_open(dummy_filename, "r" + mode) as f: read_binary_data = f.read() assert read_binary_data == dummy_data diff --git a/satpy/tests/reader_tests/test_vaisala_gld360.py b/satpy/tests/reader_tests/test_vaisala_gld360.py index 9add08b1d2..c08450613a 100644 --- a/satpy/tests/reader_tests/test_vaisala_gld360.py +++ b/satpy/tests/reader_tests/test_vaisala_gld360.py @@ -34,13 +34,13 @@ def test_vaisala_gld360(self): expected_power = np.array([12.3, 13.2, -31.]) expected_lat = np.array([30.5342, -0.5727, 12.1529]) expected_lon = np.array([-90.1152, 104.0688, -10.8756]) - expected_time = np.array(['2017-06-20T00:00:00.007178000', '2017-06-20T00:00:00.020162000', - '2017-06-20T00:00:00.023183000'], dtype='datetime64[ns]') + expected_time = np.array(["2017-06-20T00:00:00.007178000", "2017-06-20T00:00:00.020162000", + "2017-06-20T00:00:00.023183000"], dtype="datetime64[ns]") filename = StringIO( - u'2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n' - '2017-06-20 00:00:00.020162 -0.5727 104.0688 13.2 kA\n' - '2017-06-20 00:00:00.023183 12.1529 -10.8756 -31.0 kA' + u"2017-06-20 00:00:00.007178 30.5342 -90.1152 12.3 kA\n" + "2017-06-20 00:00:00.020162 -0.5727 104.0688 13.2 kA\n" + "2017-06-20 00:00:00.023183 12.1529 -10.8756 -31.0 kA" ) filename_info = {} filetype_info = {} @@ -52,25 +52,25 @@ def test_vaisala_gld360(self): filename.close() # test power - dataset_id = make_dataid(name='power') - dataset_info = {'units': 'kA'} + dataset_id = make_dataid(name="power") + dataset_info = {"units": "kA"} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_power, rtol=1e-05) # test lat - dataset_id = make_dataid(name='latitude') + dataset_id = make_dataid(name="latitude") dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_lat, rtol=1e-05) # test lon - dataset_id = make_dataid(name='longitude') + dataset_id = make_dataid(name="longitude") dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_allclose(result, expected_lon, rtol=1e-05) # test time - dataset_id = make_dataid(name='time') + dataset_id = make_dataid(name="time") dataset_info = {} result = self.handler.get_dataset(dataset_id, dataset_info).values np.testing.assert_array_equal(result, expected_time) diff --git a/satpy/tests/reader_tests/test_vii_base_nc.py b/satpy/tests/reader_tests/test_vii_base_nc.py index c2afc04356..82c0e6a4e1 100644 --- a/satpy/tests/reader_tests/test_vii_base_nc.py +++ b/satpy/tests/reader_tests/test_vii_base_nc.py @@ -30,20 +30,20 @@ from satpy.readers.vii_base_nc import SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR, ViiNCBaseFileHandler -TEST_FILE = 'test_file_vii_base_nc.nc' +TEST_FILE = "test_file_vii_base_nc.nc" class 
TestViiNCBaseFileHandler(unittest.TestCase): """Test the ViiNCBaseFileHandler reader.""" - @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_geo_interpolation') + @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_geo_interpolation") def setUp(self, pgi_): """Set up the test.""" # Easiest way to test the reader is to create a test netCDF file on the fly # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" - with Dataset(self.test_file_name, 'w') as nc: + with Dataset(self.test_file_name, "w") as nc: # Add global attributes nc.sensing_start_time_utc = "20170920173040.888" nc.sensing_end_time_utc = "20170920174117.555" @@ -51,50 +51,50 @@ def setUp(self, pgi_): nc.instrument = "test_instrument" # Create data group - g1 = nc.createGroup('data') + g1 = nc.createGroup("data") # Add dimensions to data group - g1.createDimension('num_pixels', 10) - g1.createDimension('num_lines', 100) + g1.createDimension("num_pixels", 10) + g1.createDimension("num_lines", 100) # Create data/measurement_data group - g1_1 = g1.createGroup('measurement_data') + g1_1 = g1.createGroup("measurement_data") # Add dimensions to data/measurement_data group - g1_1.createDimension('num_tie_points_act', 10) - g1_1.createDimension('num_tie_points_alt', 100) + g1_1.createDimension("num_tie_points_act", 10) + g1_1.createDimension("num_tie_points_alt", 100) # Add variables to data/measurement_data group - tpw = g1_1.createVariable('tpw', np.float32, dimensions=('num_pixels', 'num_lines')) + tpw = g1_1.createVariable("tpw", np.float32, dimensions=("num_pixels", "num_lines")) tpw[:] = 1. - tpw.test_attr = 'attr' - lon = g1_1.createVariable('longitude', + tpw.test_attr = "attr" + lon = g1_1.createVariable("longitude", np.float32, - dimensions=('num_tie_points_act', 'num_tie_points_alt')) + dimensions=("num_tie_points_act", "num_tie_points_alt")) lon[:] = 100. - lat = g1_1.createVariable('latitude', + lat = g1_1.createVariable("latitude", np.float32, - dimensions=('num_tie_points_act', 'num_tie_points_alt')) + dimensions=("num_tie_points_act", "num_tie_points_alt")) lat[:] = 10. 
# Create quality group - g2 = nc.createGroup('quality') + g2 = nc.createGroup("quality") # Add dimensions to quality group - g2.createDimension('gap_items', 2) + g2.createDimension("gap_items", 2) # Add variables to quality group - var = g2.createVariable('duration_of_product', np.double, dimensions=()) + var = g2.createVariable("duration_of_product", np.double, dimensions=()) var[:] = 1.0 - var = g2.createVariable('duration_of_data_present', np.double, dimensions=()) + var = g2.createVariable("duration_of_data_present", np.double, dimensions=()) var[:] = 2.0 - var = g2.createVariable('duration_of_data_missing', np.double, dimensions=()) + var = g2.createVariable("duration_of_data_missing", np.double, dimensions=()) var[:] = 3.0 - var = g2.createVariable('duration_of_data_degraded', np.double, dimensions=()) + var = g2.createVariable("duration_of_data_degraded", np.double, dimensions=()) var[:] = 4.0 - var = g2.createVariable('gap_start_time_utc', np.double, dimensions=('gap_items',)) + var = g2.createVariable("gap_start_time_utc", np.double, dimensions=("gap_items",)) var[:] = [5.0, 6.0] - var = g2.createVariable('gap_end_time_utc', np.double, dimensions=('gap_items',)) + var = g2.createVariable("gap_end_time_utc", np.double, dimensions=("gap_items",)) var[:] = [7.0, 8.0] # Create longitude and latitude "interpolated" arrays @@ -104,11 +104,11 @@ def setUp(self, pgi_): # Filename info valid for all readers filename_info = { - 'creation_time': datetime.datetime(year=2017, month=9, day=22, + "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), - 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), - 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) } @@ -117,8 +117,8 @@ def setUp(self, pgi_): filename=self.test_file_name, filename_info=filename_info, filetype_info={ - 'cached_longitude': 'data/measurement_data/longitude', - 'cached_latitude': 'data/measurement_data/latitude' + "cached_longitude": "data/measurement_data/longitude", + "cached_latitude": "data/measurement_data/latitude" } ) @@ -128,10 +128,10 @@ def setUp(self, pgi_): filename=self.test_file_name, filename_info=filename_info, filetype_info={ - 'cached_longitude': 'data/measurement_data/longitude', - 'cached_latitude': 'data/measurement_data/latitude', - 'interpolate': False, - 'orthorect': False + "cached_longitude": "data/measurement_data/longitude", + "cached_latitude": "data/measurement_data/latitude", + "interpolate": False, + "orthorect": False }, orthorect=True ) @@ -158,36 +158,36 @@ def test_file_reading(self): # Checks that the basic functionalities are correctly executed expected_start_time = datetime.datetime(year=2017, month=9, day=20, hour=17, minute=30, second=40, microsecond=888000) - self.assertEqual(self.reader.start_time, expected_start_time) + assert self.reader.start_time == expected_start_time expected_end_time = datetime.datetime(year=2017, month=9, day=20, hour=17, minute=41, second=17, microsecond=555000) - self.assertEqual(self.reader.end_time, expected_end_time) + assert self.reader.end_time == expected_end_time - self.assertEqual(self.reader.spacecraft_name, "test_spacecraft") - self.assertEqual(self.reader.sensor, "test_instrument") - self.assertEqual(self.reader.ssp_lon, None) + assert self.reader.spacecraft_name == 
"test_spacecraft" + assert self.reader.sensor == "test_instrument" + assert self.reader.ssp_lon is None # Checks that the global attributes are correctly read expected_global_attributes = { - 'filename': self.test_file_name, - 'start_time': expected_start_time, - 'end_time': expected_end_time, - 'spacecraft_name': "test_spacecraft", - 'ssp_lon': None, - 'sensor': "test_instrument", - 'filename_start_time': datetime.datetime(year=2017, month=9, day=20, + "filename": self.test_file_name, + "start_time": expected_start_time, + "end_time": expected_end_time, + "spacecraft_name": "test_spacecraft", + "ssp_lon": None, + "sensor": "test_instrument", + "filename_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), - 'filename_end_time': datetime.datetime(year=2017, month=9, day=20, + "filename_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50), - 'platform_name': "test_spacecraft", - 'quality_group': { - 'duration_of_product': 1., - 'duration_of_data_present': 2., - 'duration_of_data_missing': 3., - 'duration_of_data_degraded': 4., - 'gap_start_time_utc': (5., 6.), - 'gap_end_time_utc': (7., 8.) + "platform_name": "test_spacecraft", + "quality_group": { + "duration_of_product": 1., + "duration_of_data_present": 2., + "duration_of_data_missing": 3., + "duration_of_data_degraded": 4., + "gap_start_time_utc": (5., 6.), + "gap_end_time_utc": (7., 8.) } } @@ -195,27 +195,27 @@ def test_file_reading(self): # Since the global_attributes dictionary contains numpy arrays, # it is not possible to peform a simple equality test # Must iterate on all keys to confirm that the dictionaries are equal - self.assertEqual(global_attributes.keys(), expected_global_attributes.keys()) + assert global_attributes.keys() == expected_global_attributes.keys() for key in expected_global_attributes: - if key not in ['quality_group']: + if key not in ["quality_group"]: # Quality check must be valid for both iterable and not iterable elements try: equal = all(global_attributes[key] == expected_global_attributes[key]) except (TypeError, ValueError): equal = global_attributes[key] == expected_global_attributes[key] - self.assertTrue(equal) + assert equal else: - self.assertEqual(global_attributes[key].keys(), expected_global_attributes[key].keys()) + assert global_attributes[key].keys() == expected_global_attributes[key].keys() for inner_key in global_attributes[key]: # Equality check must be valid for both iterable and not iterable elements try: equal = all(global_attributes[key][inner_key] == expected_global_attributes[key][inner_key]) except (TypeError, ValueError): equal = global_attributes[key][inner_key] == expected_global_attributes[key][inner_key] - self.assertTrue(equal) + assert equal - @mock.patch('satpy.readers.vii_base_nc.tie_points_interpolation') - @mock.patch('satpy.readers.vii_base_nc.tie_points_geo_interpolation') + @mock.patch("satpy.readers.vii_base_nc.tie_points_interpolation") + @mock.patch("satpy.readers.vii_base_nc.tie_points_geo_interpolation") def test_functions(self, tpgi_, tpi_): """Test the functions.""" with self.assertRaises(NotImplementedError): @@ -226,54 +226,54 @@ def test_functions(self, tpgi_, tpi_): # Checks that the _perform_interpolation function is correctly executed variable = xr.DataArray( - dims=('y', 'x'), - name='test_name', + dims=("y", "x"), + name="test_name", attrs={ - 'key_1': 'value_1', - 'key_2': 'value_2' + "key_1": "value_1", + "key_2": "value_2" }, data=np.zeros((10, 100)), ) tpi_.return_value = 
[xr.DataArray( - dims=('num_tie_points_act', 'num_tie_points_alt'), + dims=("num_tie_points_act", "num_tie_points_alt"), data=np.ones((10, 100)) )] return_value = self.reader._perform_interpolation(variable) tpi_.assert_called_with([variable], SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR) - self.assertTrue(np.allclose(return_value, np.ones((10, 100)))) - self.assertEqual(return_value.attrs, {'key_1': 'value_1', 'key_2': 'value_2'}) - self.assertEqual(return_value.name, 'test_name') - self.assertEqual(return_value.dims, ('num_pixels', 'num_lines')) + assert np.allclose(return_value, np.ones((10, 100))) + assert return_value.attrs == {"key_1": "value_1", "key_2": "value_2"} + assert return_value.name == "test_name" + assert return_value.dims == ("num_pixels", "num_lines") # Checks that the _perform_geo_interpolation function is correctly executed variable_lon = xr.DataArray( - dims=('y', 'x'), - name='test_lon', + dims=("y", "x"), + name="test_lon", attrs={ - 'key_1': 'value_lon_1', - 'key_2': 'value_lon_2' + "key_1": "value_lon_1", + "key_2": "value_lon_2" }, data=np.zeros((10, 100)) ) variable_lat = xr.DataArray( - dims=('y', 'x'), - name='test_lat', + dims=("y", "x"), + name="test_lat", attrs={ - 'key_1': 'value_lat_1', - 'key_2': 'value_lat_2' + "key_1": "value_lat_1", + "key_2": "value_lat_2" }, data=np.ones((10, 100)) * 2. ) tpgi_.return_value = ( xr.DataArray( - dims=('num_tie_points_act', 'num_tie_points_alt'), + dims=("num_tie_points_act", "num_tie_points_alt"), data=np.ones((10, 100)) ), xr.DataArray( - dims=('num_tie_points_act', 'num_tie_points_alt'), + dims=("num_tie_points_act", "num_tie_points_alt"), data=6 * np.ones((10, 100)) ) ) @@ -282,55 +282,55 @@ def test_functions(self, tpgi_, tpi_): tpgi_.assert_called_with(variable_lon, variable_lat, SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR) - self.assertTrue(np.allclose(return_lon, np.ones((10, 100)))) - self.assertEqual(return_lon.attrs, {'key_1': 'value_lon_1', 'key_2': 'value_lon_2'}) - self.assertEqual(return_lon.name, 'test_lon') - self.assertEqual(return_lon.dims, ('num_pixels', 'num_lines')) + assert np.allclose(return_lon, np.ones((10, 100))) + assert return_lon.attrs == {"key_1": "value_lon_1", "key_2": "value_lon_2"} + assert return_lon.name == "test_lon" + assert return_lon.dims == ("num_pixels", "num_lines") - self.assertTrue(np.allclose(return_lat, 6 * np.ones((10, 100)))) - self.assertEqual(return_lat.attrs, {'key_1': 'value_lat_1', 'key_2': 'value_lat_2'}) - self.assertEqual(return_lat.name, 'test_lat') - self.assertEqual(return_lat.dims, ('num_pixels', 'num_lines')) + assert np.allclose(return_lat, 6 * np.ones((10, 100))) + assert return_lat.attrs == {"key_1": "value_lat_1", "key_2": "value_lat_2"} + assert return_lat.name == "test_lat" + assert return_lat.dims == ("num_pixels", "num_lines") def test_standardize_dims(self): """Test the standardize dims function.""" test_variable = xr.DataArray( - dims=('num_pixels', 'num_lines'), - name='test_data', + dims=("num_pixels", "num_lines"), + name="test_data", attrs={ - 'key_1': 'value_lat_1', - 'key_2': 'value_lat_2' + "key_1": "value_lat_1", + "key_2": "value_lat_2" }, data=np.ones((10, 100)) * 1. 
) out_variable = self.reader._standardize_dims(test_variable) - self.assertTrue(np.allclose(out_variable.values, np.ones((100, 10)))) - self.assertEqual(out_variable.dims, ('y', 'x')) - self.assertEqual(out_variable.attrs['key_1'], 'value_lat_1') + assert np.allclose(out_variable.values, np.ones((100, 10))) + assert out_variable.dims == ("y", "x") + assert out_variable.attrs["key_1"] == "value_lat_1" - @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_calibration') - @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_interpolation') - @mock.patch('satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_orthorectification') + @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_calibration") + @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_interpolation") + @mock.patch("satpy.readers.vii_base_nc.ViiNCBaseFileHandler._perform_orthorectification") def test_dataset(self, po_, pi_, pc_): """Test the execution of the get_dataset function.""" # Checks the correct execution of the get_dataset function with a valid file_key - variable = self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', - 'calibration': None}) + variable = self.reader.get_dataset(None, {"file_key": "data/measurement_data/tpw", + "calibration": None}) pc_.assert_not_called() pi_.assert_not_called() po_.assert_not_called() - self.assertTrue(np.allclose(variable.values, np.ones((100, 10)))) - self.assertEqual(variable.dims, ('y', 'x')) - self.assertEqual(variable.attrs['test_attr'], 'attr') - self.assertEqual(variable.attrs['units'], None) + assert np.allclose(variable.values, np.ones((100, 10))) + assert variable.dims == ("y", "x") + assert variable.attrs["test_attr"] == "attr" + assert variable.attrs["units"] is None # Checks the correct execution of the get_dataset function with a valid file_key # and required calibration and interpolation - self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', - 'calibration': 'reflectance', - 'interpolate': True, - 'standard_name': 'longitude'}) + self.reader.get_dataset(None, {"file_key": "data/measurement_data/tpw", + "calibration": "reflectance", + "interpolate": True, + "standard_name": "longitude"}) pc_.assert_called() pi_.assert_called() po_.assert_not_called() @@ -338,32 +338,32 @@ def test_dataset(self, po_, pi_, pc_): # Checks the correct execution of the get_dataset function with a valid file_key # and required orthorectification self.reader.orthorect = True - self.reader.get_dataset(None, {'file_key': 'data/measurement_data/tpw', - 'calibration': None, - 'orthorect_data': 'test_orthorect_data'}) + self.reader.get_dataset(None, {"file_key": "data/measurement_data/tpw", + "calibration": None, + "orthorect_data": "test_orthorect_data"}) po_.assert_called() # Checks the correct execution of the get_dataset function with an invalid file_key - invalid_dataset = self.reader.get_dataset(None, {'file_key': 'test_invalid', 'calibration': None}) + invalid_dataset = self.reader.get_dataset(None, {"file_key": "test_invalid", "calibration": None}) # Checks that the function returns None - self.assertEqual(invalid_dataset, None) + assert invalid_dataset is None pc_.reset_mock() pi_.reset_mock() po_.reset_mock() # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key - longitude = self.reader.get_dataset(None, {'file_key': 'cached_longitude', - 'calibration': 'reflectance', - 'interpolate': True}) + longitude = self.reader.get_dataset(None, 
{"file_key": "cached_longitude", + "calibration": "reflectance", + "interpolate": True}) pc_.assert_not_called() pi_.assert_not_called() - self.assertEqual(longitude[0, 0], 1.) + assert longitude[0, 0] == 1.0 # Checks the correct execution of the get_dataset function with a 'cached_latitude' file_key - latitude = self.reader.get_dataset(None, {'file_key': 'cached_latitude', - 'calibration': None}) - self.assertEqual(latitude[0, 0], 2.) + latitude = self.reader.get_dataset(None, {"file_key": "cached_latitude", + "calibration": None}) + assert latitude[0, 0] == 2.0 # Repeats some check with the reader where orthorectification and interpolation are inhibited # by means of the filetype_info flags @@ -374,30 +374,30 @@ def test_dataset(self, po_, pi_, pc_): # Checks the correct execution of the get_dataset function with a valid file_key # and required calibration and interpolation - self.reader_2.get_dataset(None, {'file_key': 'data/measurement_data/tpw', - 'calibration': 'reflectance', - 'interpolate': True, - 'standard_name': 'longitude'}) + self.reader_2.get_dataset(None, {"file_key": "data/measurement_data/tpw", + "calibration": "reflectance", + "interpolate": True, + "standard_name": "longitude"}) pc_.assert_called() pi_.assert_not_called() po_.assert_not_called() # Checks the correct execution of the get_dataset function with a valid file_key # and required orthorectification - self.reader_2.get_dataset(None, {'file_key': 'data/measurement_data/tpw', - 'calibration': None, - 'orthorect_data': 'test_orthorect_data'}) + self.reader_2.get_dataset(None, {"file_key": "data/measurement_data/tpw", + "calibration": None, + "orthorect_data": "test_orthorect_data"}) po_.assert_not_called() # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key - longitude = self.reader_2.get_dataset(None, {'file_key': 'cached_longitude', - 'calibration': None}) - self.assertEqual(longitude[0, 0], 100.) 
+ longitude = self.reader_2.get_dataset(None, {"file_key": "cached_longitude", + "calibration": None}) + assert longitude[0, 0] == 100.0 # Checks the correct execution of the get_dataset function with a 'cached_longitude' file_key # in a reader without defined longitude - longitude = self.reader_3.get_dataset(None, {'file_key': 'cached_longitude', - 'calibration': 'reflectance', - 'interpolate': True}) + longitude = self.reader_3.get_dataset(None, {"file_key": "cached_longitude", + "calibration": "reflectance", + "interpolate": True}) # Checks that the function returns None - self.assertEqual(longitude, None) + assert longitude is None diff --git a/satpy/tests/reader_tests/test_vii_l1b_nc.py b/satpy/tests/reader_tests/test_vii_l1b_nc.py index cf33e7872e..d9ee714d09 100644 --- a/satpy/tests/reader_tests/test_vii_l1b_nc.py +++ b/satpy/tests/reader_tests/test_vii_l1b_nc.py @@ -35,7 +35,7 @@ from satpy.readers.vii_l1b_nc import ViiL1bNCFileHandler from satpy.readers.vii_utils import MEAN_EARTH_RADIUS -TEST_FILE = 'test_file_vii_l1b_nc.nc' +TEST_FILE = "test_file_vii_l1b_nc.nc" class TestViiL1bNCFileHandler(unittest.TestCase): @@ -47,51 +47,51 @@ def setUp(self): # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" - with Dataset(self.test_file_name, 'w') as nc: + with Dataset(self.test_file_name, "w") as nc: # Create data group - g1 = nc.createGroup('data') + g1 = nc.createGroup("data") # Add dimensions to data group - g1.createDimension('num_chan_solar', 11) - g1.createDimension('num_chan_thermal', 9) - g1.createDimension('num_pixels', 72) - g1.createDimension('num_lines', 600) + g1.createDimension("num_chan_solar", 11) + g1.createDimension("num_chan_thermal", 9) + g1.createDimension("num_pixels", 72) + g1.createDimension("num_lines", 600) # Create calibration_data group - g1_1 = g1.createGroup('calibration_data') + g1_1 = g1.createGroup("calibration_data") # Add variables to data/calibration_data group - bt_a = g1_1.createVariable('bt_conversion_a', np.float32, dimensions=('num_chan_thermal',)) + bt_a = g1_1.createVariable("bt_conversion_a", np.float32, dimensions=("num_chan_thermal",)) bt_a[:] = np.arange(9) - bt_b = g1_1.createVariable('bt_conversion_b', np.float32, dimensions=('num_chan_thermal',)) + bt_b = g1_1.createVariable("bt_conversion_b", np.float32, dimensions=("num_chan_thermal",)) bt_b[:] = np.arange(9) - cw = g1_1.createVariable('channel_cw_thermal', np.float32, dimensions=('num_chan_thermal',)) + cw = g1_1.createVariable("channel_cw_thermal", np.float32, dimensions=("num_chan_thermal",)) cw[:] = np.arange(9) - isi = g1_1.createVariable('Band_averaged_solar_irradiance', np.float32, dimensions=('num_chan_solar',)) + isi = g1_1.createVariable("Band_averaged_solar_irradiance", np.float32, dimensions=("num_chan_solar",)) isi[:] = np.arange(11) # Create measurement_data group - g1_2 = g1.createGroup('measurement_data') + g1_2 = g1.createGroup("measurement_data") # Add dimensions to data/measurement_data group - g1_2.createDimension('num_tie_points_act', 10) - g1_2.createDimension('num_tie_points_alt', 100) + g1_2.createDimension("num_tie_points_act", 10) + g1_2.createDimension("num_tie_points_alt", 100) # Add variables to data/measurement_data group - sza = g1_2.createVariable('solar_zenith', np.float32, - dimensions=('num_tie_points_alt', 'num_tie_points_act')) + sza = g1_2.createVariable("solar_zenith", np.float32, + dimensions=("num_tie_points_alt", "num_tie_points_act")) sza[:] = 25.0 - 
delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_lines', 'num_pixels')) + delta_lat = g1_2.createVariable("delta_lat", np.float32, dimensions=("num_lines", "num_pixels")) delta_lat[:] = 1.0 self.reader = ViiL1bNCFileHandler( filename=self.test_file_name, filename_info={ - 'creation_time': datetime.datetime(year=2017, month=9, day=22, + "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), - 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), - 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) }, filetype_info={} @@ -115,53 +115,53 @@ def test_calibration_functions(self): bt = self.reader._calibrate_bt(radiance, cw, a, b) expected_bt = np.array([[675.04993213, 753.10301462, 894.93149648], [963.20401882, 1048.95086402, 1270.95546218]]) - self.assertTrue(np.allclose(bt, expected_bt)) + assert np.allclose(bt, expected_bt) angle_factor = 0.4 isi = 2.0 refl = self.reader._calibrate_refl(radiance, angle_factor, isi) expected_refl = np.array([[62.8318531, 125.6637061, 314.1592654], [439.8229715, 628.3185307, 1256.637061]]) - self.assertTrue(np.allclose(refl, expected_refl)) + assert np.allclose(refl, expected_refl) def test_functions(self): """Test the functions.""" # Checks that the _perform_orthorectification function is correctly executed variable = xr.DataArray( - dims=('num_lines', 'num_pixels'), - name='test_name', + dims=("num_lines", "num_pixels"), + name="test_name", attrs={ - 'key_1': 'value_1', - 'key_2': 'value_2' + "key_1": "value_1", + "key_2": "value_2" }, data=da.from_array(np.ones((600, 72))) ) - orthorect_variable = self.reader._perform_orthorectification(variable, 'data/measurement_data/delta_lat') + orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = np.degrees(np.ones((600, 72)) / MEAN_EARTH_RADIUS) + np.ones((600, 72)) - self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) + assert np.allclose(orthorect_variable.values, expected_values) # Checks that the _perform_calibration function is correctly executed in all cases # radiance calibration: return value is simply a copy of the variable - return_variable = self.reader._perform_calibration(variable, {'calibration': 'radiance'}) - self.assertTrue(np.all(return_variable == variable)) + return_variable = self.reader._perform_calibration(variable, {"calibration": "radiance"}) + assert np.all(return_variable == variable) # invalid calibration: raises a ValueError with self.assertRaises(ValueError): self.reader._perform_calibration(variable, - {'calibration': 'invalid', 'name': 'test'}) + {"calibration": "invalid", "name": "test"}) # brightness_temperature calibration: checks that the return value is correct calibrated_variable = self.reader._perform_calibration(variable, - {'calibration': 'brightness_temperature', - 'chan_thermal_index': 3}) + {"calibration": "brightness_temperature", + "chan_thermal_index": 3}) expected_values = np.full((600, 72), 1101.10413712) - self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) + assert np.allclose(calibrated_variable.values, expected_values) # reflectance calibration: checks that the return value is correct calibrated_variable = self.reader._perform_calibration(variable, - 
{'calibration': 'reflectance', - 'wavelength': [0.658, 0.668, 0.678], - 'chan_solar_index': 2}) + {"calibration": "reflectance", + "wavelength": [0.658, 0.668, 0.678], + "chan_solar_index": 2}) expected_values = np.full((600, 72), 173.3181982) - self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) + assert np.allclose(calibrated_variable.values, expected_values) diff --git a/satpy/tests/reader_tests/test_vii_l2_nc.py b/satpy/tests/reader_tests/test_vii_l2_nc.py index e431d16e73..8348470d0f 100644 --- a/satpy/tests/reader_tests/test_vii_l2_nc.py +++ b/satpy/tests/reader_tests/test_vii_l2_nc.py @@ -30,7 +30,7 @@ from satpy.readers.vii_l2_nc import ViiL2NCFileHandler -TEST_FILE = 'test_file_vii_l2_nc.nc' +TEST_FILE = "test_file_vii_l2_nc.nc" class TestViiL2NCFileHandler(unittest.TestCase): @@ -42,29 +42,29 @@ def setUp(self): # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" - with Dataset(self.test_file_name, 'w') as nc: + with Dataset(self.test_file_name, "w") as nc: # Create data group - g1 = nc.createGroup('data') + g1 = nc.createGroup("data") # Add dimensions to data group - g1.createDimension('num_pixels', 100) - g1.createDimension('num_lines', 10) + g1.createDimension("num_pixels", 100) + g1.createDimension("num_lines", 10) # Create measurement_data group - g1_2 = g1.createGroup('measurement_data') + g1_2 = g1.createGroup("measurement_data") # Add variables to data/measurement_data group - delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_lines', 'num_pixels')) + delta_lat = g1_2.createVariable("delta_lat", np.float32, dimensions=("num_lines", "num_pixels")) delta_lat[:] = 0.1 self.reader = ViiL2NCFileHandler( filename=self.test_file_name, filename_info={ - 'creation_time': datetime.datetime(year=2017, month=9, day=22, + "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), - 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), - 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) }, filetype_info={} @@ -82,16 +82,16 @@ def test_functions(self): """Test the functions.""" # Checks that the _perform_orthorectification function is correctly executed variable = xr.DataArray( - dims=('num_lines', 'num_pixels'), - name='test_name', + dims=("num_lines", "num_pixels"), + name="test_name", attrs={ - 'key_1': 'value_1', - 'key_2': 'value_2' + "key_1": "value_1", + "key_2": "value_2" }, data=da.from_array(np.ones((10, 100))) ) - orthorect_variable = self.reader._perform_orthorectification(variable, 'data/measurement_data/delta_lat') + orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = 1.1 * np.ones((10, 100)) - self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) - self.assertEqual(orthorect_variable.attrs['key_1'], 'value_1') + assert np.allclose(orthorect_variable.values, expected_values) + assert orthorect_variable.attrs["key_1"] == "value_1" diff --git a/satpy/tests/reader_tests/test_vii_utils.py b/satpy/tests/reader_tests/test_vii_utils.py index ab90833887..8d9402e926 100644 --- a/satpy/tests/reader_tests/test_vii_utils.py +++ b/satpy/tests/reader_tests/test_vii_utils.py @@ -36,8 +36,8 @@ class 
TestViiUtils(unittest.TestCase): def test_constants(self): """Test the constant values.""" # Test the value of the constants - self.assertEqual(satpy.readers.vii_utils.C1, C1) - self.assertEqual(satpy.readers.vii_utils.C2, C2) - self.assertEqual(satpy.readers.vii_utils.TIE_POINTS_FACTOR, TIE_POINTS_FACTOR) - self.assertEqual(satpy.readers.vii_utils.SCAN_ALT_TIE_POINTS, SCAN_ALT_TIE_POINTS) - self.assertEqual(satpy.readers.vii_utils.MEAN_EARTH_RADIUS, MEAN_EARTH_RADIUS) + assert satpy.readers.vii_utils.C1 == C1 + assert satpy.readers.vii_utils.C2 == C2 + assert satpy.readers.vii_utils.TIE_POINTS_FACTOR == TIE_POINTS_FACTOR + assert satpy.readers.vii_utils.SCAN_ALT_TIE_POINTS == SCAN_ALT_TIE_POINTS + assert satpy.readers.vii_utils.MEAN_EARTH_RADIUS == MEAN_EARTH_RADIUS diff --git a/satpy/tests/reader_tests/test_vii_wv_nc.py b/satpy/tests/reader_tests/test_vii_wv_nc.py index 991bbecec4..63c5604187 100644 --- a/satpy/tests/reader_tests/test_vii_wv_nc.py +++ b/satpy/tests/reader_tests/test_vii_wv_nc.py @@ -30,7 +30,7 @@ from satpy.readers.vii_l2_nc import ViiL2NCFileHandler -TEST_FILE = 'test_file_vii_wv_nc.nc' +TEST_FILE = "test_file_vii_wv_nc.nc" class TestViiL2NCFileHandler(unittest.TestCase): @@ -42,29 +42,29 @@ def setUp(self): # uses a UUID to avoid permission conflicts during execution of tests in parallel self.test_file_name = TEST_FILE + str(uuid.uuid1()) + ".nc" - with Dataset(self.test_file_name, 'w') as nc: + with Dataset(self.test_file_name, "w") as nc: # Create data group - g1 = nc.createGroup('data') + g1 = nc.createGroup("data") # Add dimensions to data group - g1.createDimension('num_points_act', 100) - g1.createDimension('num_points_alt', 10) + g1.createDimension("num_points_act", 100) + g1.createDimension("num_points_alt", 10) # Create measurement_data group - g1_2 = g1.createGroup('measurement_data') + g1_2 = g1.createGroup("measurement_data") # Add variables to data/measurement_data group - delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_points_alt', 'num_points_act')) + delta_lat = g1_2.createVariable("delta_lat", np.float32, dimensions=("num_points_alt", "num_points_act")) delta_lat[:] = 0.1 self.reader = ViiL2NCFileHandler( filename=self.test_file_name, filename_info={ - 'creation_time': datetime.datetime(year=2017, month=9, day=22, + "creation_time": datetime.datetime(year=2017, month=9, day=22, hour=22, minute=40, second=10), - 'sensing_start_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_start_time": datetime.datetime(year=2017, month=9, day=20, hour=12, minute=30, second=30), - 'sensing_end_time': datetime.datetime(year=2017, month=9, day=20, + "sensing_end_time": datetime.datetime(year=2017, month=9, day=20, hour=18, minute=30, second=50) }, filetype_info={} @@ -82,16 +82,16 @@ def test_functions(self): """Test the functions.""" # Checks that the _perform_orthorectification function is correctly executed variable = xr.DataArray( - dims=('num_points_alt', 'num_points_act'), - name='test_name', + dims=("num_points_alt", "num_points_act"), + name="test_name", attrs={ - 'key_1': 'value_1', - 'key_2': 'value_2' + "key_1": "value_1", + "key_2": "value_2" }, data=da.from_array(np.ones((10, 100))) ) - orthorect_variable = self.reader._perform_orthorectification(variable, 'data/measurement_data/delta_lat') + orthorect_variable = self.reader._perform_orthorectification(variable, "data/measurement_data/delta_lat") expected_values = 1.1 * np.ones((10, 100)) - self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) - 
self.assertEqual(orthorect_variable.attrs['key_1'], 'value_1') + assert np.allclose(orthorect_variable.values, expected_values) + assert orthorect_variable.attrs["key_1"] == "value_1" diff --git a/satpy/tests/reader_tests/test_viirs_atms_utils.py b/satpy/tests/reader_tests/test_viirs_atms_utils.py index 931cf6469e..cb388a5cab 100644 --- a/satpy/tests/reader_tests/test_viirs_atms_utils.py +++ b/satpy/tests/reader_tests/test_viirs_atms_utils.py @@ -34,8 +34,8 @@ def test_get_file_units(caplog): """Test get the file-units from the dataset info.""" - did = make_dataid(name='some_variable', modifiers=()) - ds_info = {'file_units': None} + did = make_dataid(name="some_variable", modifiers=()) + ds_info = {"file_units": None} with caplog.at_level(logging.DEBUG): file_units = _get_file_units(did, ds_info) @@ -47,20 +47,17 @@ def test_get_file_units(caplog): def test_get_scale_factors_for_units_unsupported_units(): """Test get scale factors for units, when units are not supported.""" factors = xr.DataArray(da.from_array(DEFAULT_FILE_FACTORS, chunks=1)) - file_units = 'unknown unit' - output_units = '%' - with pytest.raises(ValueError) as exec_info: + file_units = "unknown unit" + output_units = "%" + with pytest.raises(ValueError, match="Don't know how to convert 'unknown unit' to '%'"): _ = _get_scale_factors_for_units(factors, file_units, output_units) - expected = "Don't know how to convert 'unknown unit' to '%'" - assert str(exec_info.value) == expected - def test_get_scale_factors_for_units_reflectances(caplog): """Test get scale factors for units, when variable is supposed to be a reflectance.""" factors = xr.DataArray(da.from_array(DEFAULT_FILE_FACTORS, chunks=1)) - file_units = '1' - output_units = '%' + file_units = "1" + output_units = "%" with caplog.at_level(logging.DEBUG): retv = _get_scale_factors_for_units(factors, file_units, output_units) @@ -72,8 +69,8 @@ def test_get_scale_factors_for_units_reflectances(caplog): def test_get_scale_factors_for_units_tbs(caplog): """Test get scale factors for units, when variable is supposed to be a brightness temperature.""" factors = xr.DataArray(da.from_array(DEFAULT_FILE_FACTORS, chunks=1)) - file_units = 'W cm-2 sr-1' - output_units = 'W m-2 sr-1' + file_units = "W cm-2 sr-1" + output_units = "W m-2 sr-1" with caplog.at_level(logging.DEBUG): retv = _get_scale_factors_for_units(factors, file_units, output_units) diff --git a/satpy/tests/reader_tests/test_viirs_compact.py b/satpy/tests/reader_tests/test_viirs_compact.py index 03ef09124c..ba8fa6f312 100644 --- a/satpy/tests/reader_tests/test_viirs_compact.py +++ b/satpy/tests/reader_tests/test_viirs_compact.py @@ -30,7 +30,7 @@ # - tmp_path -@pytest.fixture +@pytest.fixture() def fake_dnb(): """Create fake DNB content.""" fake_dnb = { @@ -2418,7 +2418,7 @@ def fake_dnb(): return fake_dnb -@pytest.fixture +@pytest.fixture() def fake_dnb_file(fake_dnb, tmp_path): """Create an hdf5 file in viirs_compact format with DNB data in it.""" filename = tmp_path / "SVDNBC_j01_d20191025_t0611251_e0612478_b10015_c20191025062459000870_eum_ops.h5" @@ -2435,7 +2435,7 @@ class TestCompact: """Test class for reading compact viirs format.""" @pytest.fixture(autouse=True) - def setup_method(self, fake_dnb_file): + def _setup_method(self, fake_dnb_file): """Create a fake file from scratch.""" self.filename = fake_dnb_file self.client = None @@ -2445,17 +2445,17 @@ def _dataset_iterator(self): from satpy.tests.utils import make_dataid filename_info = {} - filetype_info = {'file_type': 'compact_dnb'} + filetype_info = 
{"file_type": "compact_dnb"} test = VIIRSCompactFileHandler(self.filename, filename_info, filetype_info) - dsid = make_dataid(name='DNB', calibration='radiance') + dsid = make_dataid(name="DNB", calibration="radiance") ds1 = test.get_dataset(dsid, {}) - dsid = make_dataid(name='longitude_dnb') - ds2 = test.get_dataset(dsid, {'standard_name': 'longitude'}) - dsid = make_dataid(name='latitude_dnb') - ds3 = test.get_dataset(dsid, {'standard_name': 'latitude'}) - dsid = make_dataid(name='solar_zenith_angle') - ds4 = test.get_dataset(dsid, {'standard_name': 'solar_zenith_angle'}) + dsid = make_dataid(name="longitude_dnb") + ds2 = test.get_dataset(dsid, {"standard_name": "longitude"}) + dsid = make_dataid(name="latitude_dnb") + ds3 = test.get_dataset(dsid, {"standard_name": "latitude"}) + dsid = make_dataid(name="solar_zenith_angle") + ds4 = test.get_dataset(dsid, {"standard_name": "solar_zenith_angle"}) for ds in [ds1, ds2, ds3, ds4]: yield ds @@ -2466,7 +2466,7 @@ def test_get_dataset(self): assert ds.shape == (752, 4064) assert ds.dtype == np.float32 assert ds.compute().shape == (752, 4064) - assert ds.attrs['rows_per_scan'] == 16 + assert ds.attrs["rows_per_scan"] == 16 def test_distributed(self): """Check that distributed computations work.""" diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index da6dc9a55b..9b13f384e2 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -33,7 +33,7 @@ import pytest import xarray as xr from pyresample import SwathDefinition -from pytest import TempPathFactory +from pytest import TempPathFactory # noqa: PT013 from pytest_lazyfixture import lazy_fixture I_COLS = 6400 diff --git a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py index df94283fba..7bede07292 100644 --- a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py +++ b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py @@ -60,23 +60,23 @@ class FakeModFiresNetCDF4FileHandler(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} - file_content['/attr/data_id'] = "AFMOD" - file_content['/attr/satellite_name'] = "NPP" - file_content['/attr/instrument_name'] = 'VIIRS' - - file_content['Fire Pixels/FP_latitude'] = DEFAULT_LATLON_FILE_DATA - file_content['Fire Pixels/FP_longitude'] = DEFAULT_LATLON_FILE_DATA - file_content['Fire Pixels/FP_power'] = DEFAULT_POWER_FILE_DATA - file_content['Fire Pixels/FP_T13'] = DEFAULT_M13_FILE_DATA - file_content['Fire Pixels/FP_T13/attr/units'] = 'kelvins' - file_content['Fire Pixels/FP_confidence'] = DEFAULT_DETECTION_FILE_DATA - file_content['Fire Pixels/attr/units'] = 'none' - file_content['Fire Pixels/shape'] = DEFAULT_FILE_SHAPE - - attrs = ('FP_latitude', 'FP_longitude', 'FP_T13', 'FP_confidence') + file_content["/attr/data_id"] = "AFMOD" + file_content["/attr/satellite_name"] = "NPP" + file_content["/attr/instrument_name"] = "VIIRS" + + file_content["Fire Pixels/FP_latitude"] = DEFAULT_LATLON_FILE_DATA + file_content["Fire Pixels/FP_longitude"] = DEFAULT_LATLON_FILE_DATA + file_content["Fire Pixels/FP_power"] = DEFAULT_POWER_FILE_DATA + file_content["Fire Pixels/FP_T13"] = DEFAULT_M13_FILE_DATA + file_content["Fire Pixels/FP_T13/attr/units"] = "kelvins" + file_content["Fire Pixels/FP_confidence"] = DEFAULT_DETECTION_FILE_DATA + file_content["Fire Pixels/attr/units"] = "none" + 
file_content["Fire Pixels/shape"] = DEFAULT_FILE_SHAPE + + attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") convert_file_content_to_data_array( file_content, attrs=attrs, - dims=('z', 'fakeDim0', 'fakeDim1')) + dims=("z", "fakeDim0", "fakeDim1")) return file_content @@ -86,21 +86,21 @@ class FakeImgFiresNetCDF4FileHandler(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} - file_content['/attr/data_id'] = "AFIMG" - file_content['/attr/satellite_name'] = "NPP" - file_content['/attr/instrument_name'] = 'VIIRS' - - file_content['Fire Pixels/FP_latitude'] = DEFAULT_LATLON_FILE_DATA - file_content['Fire Pixels/FP_longitude'] = DEFAULT_LATLON_FILE_DATA - file_content['Fire Pixels/FP_power'] = DEFAULT_POWER_FILE_DATA - file_content['Fire Pixels/FP_T4'] = DEFAULT_M13_FILE_DATA - file_content['Fire Pixels/FP_T4/attr/units'] = 'kelvins' - file_content['Fire Pixels/FP_confidence'] = DEFAULT_DETECTION_FILE_DATA - - attrs = ('FP_latitude', 'FP_longitude', 'FP_T13', 'FP_confidence') + file_content["/attr/data_id"] = "AFIMG" + file_content["/attr/satellite_name"] = "NPP" + file_content["/attr/instrument_name"] = "VIIRS" + + file_content["Fire Pixels/FP_latitude"] = DEFAULT_LATLON_FILE_DATA + file_content["Fire Pixels/FP_longitude"] = DEFAULT_LATLON_FILE_DATA + file_content["Fire Pixels/FP_power"] = DEFAULT_POWER_FILE_DATA + file_content["Fire Pixels/FP_T4"] = DEFAULT_M13_FILE_DATA + file_content["Fire Pixels/FP_T4/attr/units"] = "kelvins" + file_content["Fire Pixels/FP_confidence"] = DEFAULT_DETECTION_FILE_DATA + + attrs = ("FP_latitude", "FP_longitude", "FP_T13", "FP_confidence") convert_file_content_to_data_array( file_content, attrs=attrs, - dims=('z', 'fakeDim0', 'fakeDim1')) + dims=("z", "fakeDim0", "fakeDim1")) return file_content @@ -114,13 +114,13 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): platform_key = {"NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21"} - self.platform_name = platform_key.get(self.filename_info['satellite_name'].upper(), "unknown") + self.platform_name = platform_key.get(self.filename_info["satellite_name"].upper(), "unknown") def get_test_content(self): """Create fake test file content.""" - fake_file = io.StringIO(u'''\n\n\n\n\n\n\n\n\n\n\n\n\n\n + fake_file = io.StringIO(u"""\n\n\n\n\n\n\n\n\n\n\n\n\n\n 24.64015007, -107.57017517, 317.38290405, 0.75, 0.75, 40, 4.28618050 - 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764''') + 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764""") return dd.from_pandas(pd.read_csv(fake_file, skiprows=15, header=None, names=["latitude", "longitude", @@ -139,13 +139,13 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): def get_test_content(self): """Create fake test file content.""" - fake_file = io.StringIO(u'''\n\n\n\n\n\n\n\n\n\n\n\n\n\n + fake_file = io.StringIO(u"""\n\n\n\n\n\n\n\n\n\n\n\n\n\n 24.64015007, -107.57017517, 317.38290405, 0.75, 0.75, 40, 4.28618050 - 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764''') + 25.90660477, -100.06127167, 331.17962646, 0.75, 0.75, 81, 20.61096764""") platform_key = {"NPP": "Suomi-NPP", "J01": "NOAA-20", "J02": "NOAA-21"} - self.platform_name = platform_key.get(self.filename_info['satellite_name'].upper(), "unknown") + self.platform_name = platform_key.get(self.filename_info["satellite_name"].upper(), "unknown") return dd.from_pandas(pd.read_csv(fake_file, skiprows=15, 
header=None, names=["latitude", "longitude", @@ -157,14 +157,14 @@ def get_test_content(self): class TestModVIIRSActiveFiresNetCDF4(unittest.TestCase): """Test VIIRS Fires Reader.""" - yaml_file = 'viirs_edr_active_fires.yaml' + yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap CDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(VIIRSActiveFiresFileHandler, '__bases__', (FakeModFiresNetCDF4FileHandler,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(VIIRSActiveFiresFileHandler, "__bases__", (FakeModFiresNetCDF4FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -177,51 +177,51 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' + "AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset(self): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' + "AFMOD_j02_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) r.create_filehandlers(loadables) - datasets = r.load(['confidence_pct']) - self.assertEqual(len(datasets), 1) + datasets = r.load(["confidence_pct"]) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['units'], '%') - self.assertEqual(v.attrs['_FillValue'], 255) - self.assertTrue(np.issubdtype(v.dtype, DEFAULT_DETECTION_FILE_DTYPE)) + assert v.attrs["units"] == "%" + assert v.attrs["_FillValue"] == 255 + assert np.issubdtype(v.dtype, DEFAULT_DETECTION_FILE_DTYPE) - datasets = r.load(['T13']) - self.assertEqual(len(datasets), 1) + datasets = r.load(["T13"]) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'K') + assert v.attrs["units"] == "K" - datasets = r.load(['power']) - self.assertEqual(len(datasets), 1) + datasets = r.load(["power"]) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'MW') - self.assertEqual(v.attrs['platform_name'], 'NOAA-21') - self.assertEqual(v.attrs['sensor'], 'viirs') + assert v.attrs["units"] == "MW" + assert v.attrs["platform_name"] == "NOAA-21" + assert v.attrs["sensor"] == "viirs" class TestImgVIIRSActiveFiresNetCDF4(unittest.TestCase): """Test VIIRS Fires Reader.""" - yaml_file = 'viirs_edr_active_fires.yaml' + yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap CDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(VIIRSActiveFiresFileHandler, '__bases__', (FakeImgFiresNetCDF4FileHandler,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = 
mock.patch.object(VIIRSActiveFiresFileHandler, "__bases__", (FakeImgFiresNetCDF4FileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -234,52 +234,52 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' + "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset(self): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc' + "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.nc" ]) r.create_filehandlers(loadables) - datasets = r.load(['confidence_cat']) - self.assertEqual(len(datasets), 1) + datasets = r.load(["confidence_cat"]) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['units'], '1') - self.assertEqual(v.attrs['flag_meanings'], ['low', 'medium', 'high']) - self.assertEqual(v.attrs['flag_values'], [7, 8, 9]) + assert v.attrs["units"] == "1" + assert v.attrs["flag_meanings"] == ["low", "medium", "high"] + assert v.attrs["flag_values"] == [7, 8, 9] - datasets = r.load(['T4']) - self.assertEqual(len(datasets), 1) + datasets = r.load(["T4"]) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'K') + assert v.attrs["units"] == "K" - datasets = r.load(['power']) - self.assertEqual(len(datasets), 1) + datasets = r.load(["power"]) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'MW') - self.assertEqual(v.attrs['platform_name'], 'Suomi-NPP') - self.assertEqual(v.attrs['sensor'], 'viirs') + assert v.attrs["units"] == "MW" + assert v.attrs["platform_name"] == "Suomi-NPP" + assert v.attrs["sensor"] == "viirs" -@mock.patch('satpy.readers.viirs_edr_active_fires.dd.read_csv') +@mock.patch("satpy.readers.viirs_edr_active_fires.dd.read_csv") class TestModVIIRSActiveFiresText(unittest.TestCase): """Test VIIRS Fires Reader.""" - yaml_file = 'viirs_edr_active_fires.yaml' + yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresTextFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, '__bases__', (FakeModFiresTextFileHandler,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, "__bases__", (FakeModFiresTextFileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -292,50 +292,50 @@ def test_init(self, mock_obj): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' + "AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - 
self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset(self, csv_mock): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' + "AFEDR_j01_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) r.create_filehandlers(loadables) - datasets = r.load(['confidence_pct']) - self.assertEqual(len(datasets), 1) + datasets = r.load(["confidence_pct"]) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['units'], '%') + assert v.attrs["units"] == "%" - datasets = r.load(['T13']) - self.assertEqual(len(datasets), 1) + datasets = r.load(["T13"]) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'K') + assert v.attrs["units"] == "K" - datasets = r.load(['power']) - self.assertEqual(len(datasets), 1) + datasets = r.load(["power"]) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'MW') - self.assertEqual(v.attrs['platform_name'], 'NOAA-20') - self.assertEqual(v.attrs['sensor'], 'VIIRS') + assert v.attrs["units"] == "MW" + assert v.attrs["platform_name"] == "NOAA-20" + assert v.attrs["sensor"] == "VIIRS" -@mock.patch('satpy.readers.viirs_edr_active_fires.dd.read_csv') +@mock.patch("satpy.readers.viirs_edr_active_fires.dd.read_csv") class TestImgVIIRSActiveFiresText(unittest.TestCase): """Test VIIRS Fires Reader.""" - yaml_file = 'viirs_edr_active_fires.yaml' + yaml_file = "viirs_edr_active_fires.yaml" def setUp(self): """Wrap file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_active_fires import VIIRSActiveFiresTextFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, '__bases__', (FakeImgFiresTextFileHandler,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(VIIRSActiveFiresTextFileHandler, "__bases__", (FakeImgFiresTextFileHandler,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -348,35 +348,35 @@ def test_init(self, mock_obj): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' + "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset(self, mock_obj): """Test loading all datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt' + "AFIMG_npp_d20180829_t2015451_e2017093_b35434_c20180829210527716708_cspp_dev.txt" ]) r.create_filehandlers(loadables) - datasets = r.load(['confidence_cat']) - self.assertEqual(len(datasets), 1) + datasets = r.load(["confidence_cat"]) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['units'], '1') - self.assertEqual(v.attrs['flag_meanings'], ['low', 'medium', 'high']) - self.assertEqual(v.attrs['flag_values'], [7, 8, 9]) + assert v.attrs["units"] 
== "1" + assert v.attrs["flag_meanings"] == ["low", "medium", "high"] + assert v.attrs["flag_values"] == [7, 8, 9] - datasets = r.load(['T4']) - self.assertEqual(len(datasets), 1) + datasets = r.load(["T4"]) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'K') + assert v.attrs["units"] == "K" - datasets = r.load(['power']) - self.assertEqual(len(datasets), 1) + datasets = r.load(["power"]) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'MW') - self.assertEqual(v.attrs['platform_name'], 'Suomi-NPP') - self.assertEqual(v.attrs['sensor'], 'VIIRS') + assert v.attrs["units"] == "MW" + assert v.attrs["platform_name"] == "Suomi-NPP" + assert v.attrs["sensor"] == "VIIRS" diff --git a/satpy/tests/reader_tests/test_viirs_edr_flood.py b/satpy/tests/reader_tests/test_viirs_edr_flood.py index 9b544dc9f1..b7bc9f0319 100644 --- a/satpy/tests/reader_tests/test_viirs_edr_flood.py +++ b/satpy/tests/reader_tests/test_viirs_edr_flood.py @@ -36,51 +36,51 @@ class FakeHDF4FileHandler2(FakeHDF4FileHandler): def get_test_content(self, filename, filename_info, filename_type): """Mimic reader input file content.""" file_content = {} - file_content['/attr/Satellitename'] = filename_info['platform_shortname'] - file_content['/attr/SensorIdentifyCode'] = 'VIIRS' + file_content["/attr/Satellitename"] = filename_info["platform_shortname"] + file_content["/attr/SensorIdentifyCode"] = "VIIRS" # only one dataset for the flood reader - file_content['WaterDetection'] = DEFAULT_FILE_DATA - file_content['WaterDetection/attr/_Fillvalue'] = 1 - file_content['WaterDetection/attr/scale_factor'] = 1. - file_content['WaterDetection/attr/add_offset'] = 0. - file_content['WaterDetection/attr/units'] = 'none' - file_content['WaterDetection/shape'] = DEFAULT_FILE_SHAPE - file_content['WaterDetection/attr/ProjectionMinLatitude'] = 15. - file_content['WaterDetection/attr/ProjectionMaxLatitude'] = 68. - file_content['WaterDetection/attr/ProjectionMinLongitude'] = -124. - file_content['WaterDetection/attr/ProjectionMaxLongitude'] = -61. + file_content["WaterDetection"] = DEFAULT_FILE_DATA + file_content["WaterDetection/attr/_Fillvalue"] = 1 + file_content["WaterDetection/attr/scale_factor"] = 1. + file_content["WaterDetection/attr/add_offset"] = 0. + file_content["WaterDetection/attr/units"] = "none" + file_content["WaterDetection/shape"] = DEFAULT_FILE_SHAPE + file_content["WaterDetection/attr/ProjectionMinLatitude"] = 15. + file_content["WaterDetection/attr/ProjectionMaxLatitude"] = 68. + file_content["WaterDetection/attr/ProjectionMinLongitude"] = -124. + file_content["WaterDetection/attr/ProjectionMaxLongitude"] = -61. 
# convert tp xarrays from xarray import DataArray for key, val in file_content.items(): if isinstance(val, np.ndarray): attrs = {} - for a in ['_Fillvalue', 'units', 'ProjectionMinLatitude', 'ProjectionMaxLongitude', - 'ProjectionMinLongitude', 'ProjectionMaxLatitude']: - if key + '/attr/' + a in file_content: - attrs[a] = file_content[key + '/attr/' + a] + for a in ["_Fillvalue", "units", "ProjectionMinLatitude", "ProjectionMaxLongitude", + "ProjectionMinLongitude", "ProjectionMaxLatitude"]: + if key + "/attr/" + a in file_content: + attrs[a] = file_content[key + "/attr/" + a] if val.ndim > 1: - file_content[key] = DataArray(val, dims=('fakeDim0', 'fakeDim1'), attrs=attrs) + file_content[key] = DataArray(val, dims=("fakeDim0", "fakeDim1"), attrs=attrs) else: file_content[key] = DataArray(val, attrs=attrs) - if 'y' not in file_content['WaterDetection'].dims: - file_content['WaterDetection'] = file_content['WaterDetection'].rename({'fakeDim0': 'x', 'fakeDim1': 'y'}) + if "y" not in file_content["WaterDetection"].dims: + file_content["WaterDetection"] = file_content["WaterDetection"].rename({"fakeDim0": "x", "fakeDim1": "y"}) return file_content class TestVIIRSEDRFloodReader(unittest.TestCase): """Test VIIRS EDR Flood Reader.""" - yaml_file = 'viirs_edr_flood.yaml' + yaml_file = "viirs_edr_flood.yaml" def setUp(self): """Wrap HDF4 file handler with own fake file handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_edr_flood import VIIRSEDRFlood - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) - self.p = mock.patch.object(VIIRSEDRFlood, '__bases__', (FakeHDF4FileHandler2,)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) + self.p = mock.patch.object(VIIRSEDRFlood, "__bases__", (FakeHDF4FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -93,34 +93,34 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf' + "WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf" ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_dataset(self): """Test loading all datasets from a full swath file.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf' + "WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_10_300_01.hdf" ]) r.create_filehandlers(loadables) - datasets = r.load(['WaterDetection']) - self.assertEqual(len(datasets), 1) + datasets = r.load(["WaterDetection"]) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'none') + assert v.attrs["units"] == "none" def test_load_dataset_aoi(self): """Test loading all datasets from an area of interest file.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_001_10_300_01.hdf' + "WATER_VIIRS_Prj_SVI_npp_d20180824_t1828213_e1839433_b35361_cspp_dev_001_10_300_01.hdf" ]) r.create_filehandlers(loadables) - datasets = r.load(['WaterDetection']) - self.assertEqual(len(datasets), 
1) + datasets = r.load(["WaterDetection"]) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'none') + assert v.attrs["units"] == "none" diff --git a/satpy/tests/reader_tests/test_viirs_l1b.py b/satpy/tests/reader_tests/test_viirs_l1b.py index 0d3b2ad1b9..e60f83cfd0 100644 --- a/satpy/tests/reader_tests/test_viirs_l1b.py +++ b/satpy/tests/reader_tests/test_viirs_l1b.py @@ -49,22 +49,22 @@ class FakeNetCDF4FileHandlerDay(FakeNetCDF4FileHandler): def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - dt = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0)) + dt = filename_info.get("start_time", datetime(2016, 1, 1, 12, 0, 0)) file_type = filename[:5].lower() num_lines = DEFAULT_FILE_SHAPE[0] num_pixels = DEFAULT_FILE_SHAPE[1] num_scans = 5 num_luts = DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1] file_content = { - '/dimension/number_of_scans': num_scans, - '/dimension/number_of_lines': num_lines, - '/dimension/number_of_pixels': num_pixels, - '/dimension/number_of_LUT_values': num_luts, - '/attr/time_coverage_start': dt.strftime('%Y-%m-%dT%H:%M:%S.000Z'), - '/attr/time_coverage_end': (dt + timedelta(minutes=6)).strftime('%Y-%m-%dT%H:%M:%S.000Z'), - '/attr/orbit_number': 26384, - '/attr/instrument': 'VIIRS', - '/attr/platform': 'Suomi-NPP', + "/dimension/number_of_scans": num_scans, + "/dimension/number_of_lines": num_lines, + "/dimension/number_of_pixels": num_pixels, + "/dimension/number_of_LUT_values": num_luts, + "/attr/time_coverage_start": dt.strftime("%Y-%m-%dT%H:%M:%S.000Z"), + "/attr/time_coverage_end": (dt + timedelta(minutes=6)).strftime("%Y-%m-%dT%H:%M:%S.000Z"), + "/attr/orbit_number": 26384, + "/attr/instrument": "VIIRS", + "/attr/platform": "Suomi-NPP", } self._fill_contents_with_default_data(file_content, file_type) self._set_dataset_specific_metadata(file_content) @@ -73,57 +73,57 @@ def get_test_content(self, filename, filename_info, filetype_info): def _fill_contents_with_default_data(self, file_content, file_type): """Fill file contents with default data.""" - if file_type.startswith('vgeo'): - file_content['/attr/OrbitNumber'] = file_content.pop('/attr/orbit_number') - file_content['geolocation_data/latitude'] = DEFAULT_LAT_DATA - file_content['geolocation_data/longitude'] = DEFAULT_LON_DATA - file_content['geolocation_data/solar_zenith'] = DEFAULT_LON_DATA - file_content['geolocation_data/solar_azimuth'] = DEFAULT_LON_DATA - file_content['geolocation_data/sensor_zenith'] = DEFAULT_LON_DATA - file_content['geolocation_data/sensor_azimuth'] = DEFAULT_LON_DATA - if file_type.endswith('d'): - file_content['geolocation_data/lunar_zenith'] = DEFAULT_LON_DATA - file_content['geolocation_data/lunar_azimuth'] = DEFAULT_LON_DATA - elif file_type == 'vl1bm': + if file_type.startswith("vgeo"): + file_content["/attr/OrbitNumber"] = file_content.pop("/attr/orbit_number") + file_content["geolocation_data/latitude"] = DEFAULT_LAT_DATA + file_content["geolocation_data/longitude"] = DEFAULT_LON_DATA + file_content["geolocation_data/solar_zenith"] = DEFAULT_LON_DATA + file_content["geolocation_data/solar_azimuth"] = DEFAULT_LON_DATA + file_content["geolocation_data/sensor_zenith"] = DEFAULT_LON_DATA + file_content["geolocation_data/sensor_azimuth"] = DEFAULT_LON_DATA + if file_type.endswith("d"): + file_content["geolocation_data/lunar_zenith"] = DEFAULT_LON_DATA + file_content["geolocation_data/lunar_azimuth"] = DEFAULT_LON_DATA + elif file_type == "vl1bm": for m_band in 
self.M_BANDS: - file_content[f'observation_data/{m_band}'] = DEFAULT_FILE_DATA - elif file_type == 'vl1bi': + file_content[f"observation_data/{m_band}"] = DEFAULT_FILE_DATA + elif file_type == "vl1bi": for i_band in self.I_BANDS: - file_content[f'observation_data/{i_band}'] = DEFAULT_FILE_DATA - elif file_type == 'vl1bd': - file_content['observation_data/DNB_observations'] = DEFAULT_FILE_DATA - file_content['observation_data/DNB_observations/attr/units'] = 'Watts/cm^2/steradian' + file_content[f"observation_data/{i_band}"] = DEFAULT_FILE_DATA + elif file_type == "vl1bd": + file_content["observation_data/DNB_observations"] = DEFAULT_FILE_DATA + file_content["observation_data/DNB_observations/attr/units"] = "Watts/cm^2/steradian" @staticmethod def _set_dataset_specific_metadata(file_content): """Set dataset-specific metadata.""" for k in list(file_content.keys()): - if not k.startswith('observation_data') and not k.startswith('geolocation_data'): + if not k.startswith("observation_data") and not k.startswith("geolocation_data"): continue - file_content[k + '/shape'] = DEFAULT_FILE_SHAPE - if k[-3:] in ['M12', 'M13', 'M14', 'M15', 'M16', 'I04', 'I05']: - file_content[k + '_brightness_temperature_lut'] = DEFAULT_FILE_DATA.ravel() - file_content[k + '_brightness_temperature_lut/attr/units'] = 'Kelvin' - file_content[k + '_brightness_temperature_lut/attr/valid_min'] = 0 - file_content[k + '_brightness_temperature_lut/attr/valid_max'] = 65534 - file_content[k + '_brightness_temperature_lut/attr/_FillValue'] = 65535 - file_content[k + '/attr/units'] = 'Watts/meter^2/steradian/micrometer' - elif k[-3:] in ['M01', 'M02', 'M03', 'M04', 'M05', 'M06', 'M07', 'M08', - 'M09', 'M10', 'M11', 'I01', 'I02', 'I03']: - file_content[k + '/attr/radiance_units'] = 'Watts/meter^2/steradian/micrometer' - file_content[k + '/attr/radiance_scale_factor'] = 1.1 - file_content[k + '/attr/radiance_add_offset'] = 0.1 - elif k.endswith('longitude'): - file_content[k + '/attr/units'] = 'degrees_east' - elif k.endswith('latitude'): - file_content[k + '/attr/units'] = 'degrees_north' - elif k.endswith('zenith') or k.endswith('azimuth'): - file_content[k + '/attr/units'] = 'degrees' - file_content[k + '/attr/valid_min'] = 0 - file_content[k + '/attr/valid_max'] = 65534 - file_content[k + '/attr/_FillValue'] = 65535 - file_content[k + '/attr/scale_factor'] = 1.1 - file_content[k + '/attr/add_offset'] = 0.1 + file_content[k + "/shape"] = DEFAULT_FILE_SHAPE + if k[-3:] in ["M12", "M13", "M14", "M15", "M16", "I04", "I05"]: + file_content[k + "_brightness_temperature_lut"] = DEFAULT_FILE_DATA.ravel() + file_content[k + "_brightness_temperature_lut/attr/units"] = "Kelvin" + file_content[k + "_brightness_temperature_lut/attr/valid_min"] = 0 + file_content[k + "_brightness_temperature_lut/attr/valid_max"] = 65534 + file_content[k + "_brightness_temperature_lut/attr/_FillValue"] = 65535 + file_content[k + "/attr/units"] = "Watts/meter^2/steradian/micrometer" + elif k[-3:] in ["M01", "M02", "M03", "M04", "M05", "M06", "M07", "M08", + "M09", "M10", "M11", "I01", "I02", "I03"]: + file_content[k + "/attr/radiance_units"] = "Watts/meter^2/steradian/micrometer" + file_content[k + "/attr/radiance_scale_factor"] = 1.1 + file_content[k + "/attr/radiance_add_offset"] = 0.1 + elif k.endswith("longitude"): + file_content[k + "/attr/units"] = "degrees_east" + elif k.endswith("latitude"): + file_content[k + "/attr/units"] = "degrees_north" + elif k.endswith("zenith") or k.endswith("azimuth"): + file_content[k + "/attr/units"] = "degrees" + 
file_content[k + "/attr/valid_min"] = 0 + file_content[k + "/attr/valid_max"] = 65534 + file_content[k + "/attr/_FillValue"] = 65535 + file_content[k + "/attr/scale_factor"] = 1.1 + file_content[k + "/attr/add_offset"] = 0.1 class FakeNetCDF4FileHandlerNight(FakeNetCDF4FileHandlerDay): @@ -149,9 +149,9 @@ def setup_method(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_l1b import VIIRSL1BFileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIIRSL1BFileHandler, '__bases__', (self.fake_cls,)) + self.p = mock.patch.object(VIIRSL1BFileHandler, "__bases__", (self.fake_cls,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -164,7 +164,7 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", ]) assert len(loadables) == 1 r.create_filehandlers(loadables) @@ -176,8 +176,8 @@ def test_available_datasets_m_bands(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) avail_names = r.available_dataset_names @@ -190,52 +190,52 @@ def test_load_every_m_band_bt(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['M12', - 'M13', - 'M14', - 'M15', - 'M16']) + datasets = r.load(["M12", + "M13", + "M14", + "M15", + "M16"]) assert len(datasets) == 5 for v in datasets.values(): - assert v.attrs['calibration'] == 'brightness_temperature' - assert v.attrs['units'] == 'K' - assert v.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["calibration"] == "brightness_temperature" + assert v.attrs["units"] == "K" + assert v.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 + assert v.attrs["sensor"] == "viirs" def test_load_every_m_band_refl(self): """Test loading all M band reflectances.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11']) + datasets = 
r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11"]) assert len(datasets) == (11 if self.has_reflectance_bands else 0) for v in datasets.values(): - assert v.attrs['calibration'] == 'reflectance' - assert v.attrs['units'] == '%' - assert v.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["calibration"] == "reflectance" + assert v.attrs["units"] == "%" + assert v.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 + assert v.attrs["sensor"] == "viirs" def test_load_every_m_band_rad(self): """Test loading all M bands as radiances.""" @@ -243,34 +243,34 @@ def test_load_every_m_band_rad(self): from satpy.tests.utils import make_dataid r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) - datasets = r.load([make_dataid(name='M01', calibration='radiance'), - make_dataid(name='M02', calibration='radiance'), - make_dataid(name='M03', calibration='radiance'), - make_dataid(name='M04', calibration='radiance'), - make_dataid(name='M05', calibration='radiance'), - make_dataid(name='M06', calibration='radiance'), - make_dataid(name='M07', calibration='radiance'), - make_dataid(name='M08', calibration='radiance'), - make_dataid(name='M09', calibration='radiance'), - make_dataid(name='M10', calibration='radiance'), - make_dataid(name='M11', calibration='radiance'), - make_dataid(name='M12', calibration='radiance'), - make_dataid(name='M13', calibration='radiance'), - make_dataid(name='M14', calibration='radiance'), - make_dataid(name='M15', calibration='radiance'), - make_dataid(name='M16', calibration='radiance')]) + datasets = r.load([make_dataid(name="M01", calibration="radiance"), + make_dataid(name="M02", calibration="radiance"), + make_dataid(name="M03", calibration="radiance"), + make_dataid(name="M04", calibration="radiance"), + make_dataid(name="M05", calibration="radiance"), + make_dataid(name="M06", calibration="radiance"), + make_dataid(name="M07", calibration="radiance"), + make_dataid(name="M08", calibration="radiance"), + make_dataid(name="M09", calibration="radiance"), + make_dataid(name="M10", calibration="radiance"), + make_dataid(name="M11", calibration="radiance"), + make_dataid(name="M12", calibration="radiance"), + make_dataid(name="M13", calibration="radiance"), + make_dataid(name="M14", calibration="radiance"), + make_dataid(name="M15", calibration="radiance"), + make_dataid(name="M16", calibration="radiance")]) assert len(datasets) == (16 if self.has_reflectance_bands else 5) for v in datasets.values(): - assert v.attrs['calibration'] == 'radiance' - assert v.attrs['units'] == 'W m-2 um-1 sr-1' - assert v.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["calibration"] == "radiance" + assert v.attrs["units"] == "W m-2 um-1 sr-1" + assert v.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 + assert 
v.attrs["area"].lats.attrs["rows_per_scan"] == 2 + assert v.attrs["sensor"] == "viirs" def test_load_i_band_angles(self): """Test loading all M bands as radiances.""" @@ -278,65 +278,65 @@ def test_load_i_band_angles(self): from satpy.tests.utils import make_dataid r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BI_snpp_d20161130_t012400_c20161130054822.nc', - 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOI_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BI_snpp_d20161130_t012400_c20161130054822.nc", + "VL1BM_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOI_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOM_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) datasets = r.load([ - make_dataid(name='satellite_zenith_angle'), - make_dataid(name='satellite_azimuth_angle'), - make_dataid(name='solar_azimuth_angle'), - make_dataid(name='solar_zenith_angle'), + make_dataid(name="satellite_zenith_angle"), + make_dataid(name="satellite_azimuth_angle"), + make_dataid(name="solar_azimuth_angle"), + make_dataid(name="solar_zenith_angle"), ]) assert len(datasets) == 4 for v in datasets.values(): - assert v.attrs['resolution'] == 371 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["resolution"] == 371 + assert v.attrs["sensor"] == "viirs" def test_load_dnb_radiance(self): """Test loading the main DNB dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BD_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOD_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BD_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOD_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['DNB']) + datasets = r.load(["DNB"]) assert len(datasets) == 1 for v in datasets.values(): - assert v.attrs['calibration'] == 'radiance' - assert v.attrs['units'] == 'W m-2 sr-1' - assert v.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["calibration"] == "radiance" + assert v.attrs["units"] == "W m-2 sr-1" + assert v.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 + assert v.attrs["sensor"] == "viirs" def test_load_dnb_angles(self): """Test loading all DNB angle datasets.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'VL1BD_snpp_d20161130_t012400_c20161130054822.nc', - 'VGEOD_snpp_d20161130_t012400_c20161130054822.nc', + "VL1BD_snpp_d20161130_t012400_c20161130054822.nc", + "VGEOD_snpp_d20161130_t012400_c20161130054822.nc", ]) r.create_filehandlers(loadables) - datasets = r.load(['dnb_solar_zenith_angle', - 'dnb_solar_azimuth_angle', - 'dnb_satellite_zenith_angle', - 'dnb_satellite_azimuth_angle', - 'dnb_lunar_zenith_angle', - 'dnb_lunar_azimuth_angle', + datasets = r.load(["dnb_solar_zenith_angle", + "dnb_solar_azimuth_angle", + "dnb_satellite_zenith_angle", + "dnb_satellite_azimuth_angle", + "dnb_lunar_zenith_angle", + "dnb_lunar_azimuth_angle", ]) assert len(datasets) == 6 for v in datasets.values(): - assert v.attrs['units'] == 'degrees' - assert v.attrs['rows_per_scan'] == 2 - assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 - assert 
v.attrs['area'].lats.attrs['rows_per_scan'] == 2 - assert v.attrs['sensor'] == "viirs" + assert v.attrs["units"] == "degrees" + assert v.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lons.attrs["rows_per_scan"] == 2 + assert v.attrs["area"].lats.attrs["rows_per_scan"] == 2 + assert v.attrs["sensor"] == "viirs" class TestVIIRSL1BReaderDayNight(TestVIIRSL1BReaderDay): diff --git a/satpy/tests/reader_tests/test_viirs_sdr.py b/satpy/tests/reader_tests/test_viirs_sdr.py index ed50214c15..952224daaf 100644 --- a/satpy/tests/reader_tests/test_viirs_sdr.py +++ b/satpy/tests/reader_tests/test_viirs_sdr.py @@ -49,18 +49,18 @@ def __init__(self, filename, filename_info, filetype_info, include_factors=True) @staticmethod def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans): - start_time = filename_info['start_time'] - end_time = filename_info['end_time'].replace(year=start_time.year, + start_time = filename_info["start_time"] + end_time = filename_info["end_time"].replace(year=start_time.year, month=start_time.month, day=start_time.day) - begin_date = start_time.strftime('%Y%m%d') + begin_date = start_time.strftime("%Y%m%d") begin_date = np.array(begin_date) - begin_time = start_time.strftime('%H%M%S.%fZ') + begin_time = start_time.strftime("%H%M%S.%fZ") begin_time = np.array(begin_time) - ending_date = end_time.strftime('%Y%m%d') - ending_time = end_time.strftime('%H%M%S.%fZ') + ending_date = end_time.strftime("%Y%m%d") + ending_time = end_time.strftime("%H%M%S.%fZ") new_file_content = { "{prefix2}/attr/AggregateNumberGranules": num_grans, "{prefix2}/attr/AggregateBeginningDate": begin_date, @@ -69,8 +69,8 @@ def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans): "{prefix2}/attr/AggregateEndingTime": ending_time, "{prefix2}/attr/G-Ring_Longitude": np.array([0.0, 0.1, 0.2, 0.3]), "{prefix2}/attr/G-Ring_Latitude": np.array([0.0, 0.1, 0.2, 0.3]), - "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info['orbit']), - "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info['orbit']), + "{prefix2}/attr/AggregateBeginningOrbitNumber": "{0:d}".format(filename_info["orbit"]), + "{prefix2}/attr/AggregateEndingOrbitNumber": "{0:d}".format(filename_info["orbit"]), "{prefix1}/attr/Instrument_Short_Name": "VIIRS", "/attr/Platform_Short_Name": "NPP", } @@ -84,13 +84,13 @@ def _add_granule_specific_info_to_file_content( lats_lists = self._get_per_granule_lats() file_content["{prefix3}/NumberOfScans"] = np.array([48] * num_granules) for granule_idx in range(num_granules): - prefix_gran = '{prefix}/{dataset_group}_Gran_{idx}'.format(prefix=gran_group_prefix, + prefix_gran = "{prefix}/{dataset_group}_Gran_{idx}".format(prefix=gran_group_prefix, dataset_group=dataset_group, idx=granule_idx) num_scans = num_scans_per_granule[granule_idx] - file_content[prefix_gran + '/attr/N_Number_Of_Scans'] = num_scans - file_content[prefix_gran + '/attr/G-Ring_Longitude'] = lons_lists[granule_idx] - file_content[prefix_gran + '/attr/G-Ring_Latitude'] = lats_lists[granule_idx] + file_content[prefix_gran + "/attr/N_Number_Of_Scans"] = num_scans + file_content[prefix_gran + "/attr/G-Ring_Longitude"] = lons_lists[granule_idx] + file_content[prefix_gran + "/attr/G-Ring_Latitude"] = lats_lists[granule_idx] @staticmethod def _get_per_granule_lons(): @@ -154,13 +154,13 @@ def _add_data_info_to_file_content(self, file_content, filename, data_var_prefix # SDR files always produce data with 48 scans per granule even if there are less total_rows = 
DEFAULT_FILE_SHAPE[0] * 48 * num_grans new_shape = (total_rows, DEFAULT_FILE_SHAPE[1]) - if filename[2:5] in ['M{:02d}'.format(x) for x in range(12)] + ['I01', 'I02', 'I03']: - keys = ['Radiance', 'Reflectance'] - elif filename[2:5] in ['M{:02d}'.format(x) for x in range(12, 17)] + ['I04', 'I05']: - keys = ['Radiance', 'BrightnessTemperature'] + if filename[2:5] in ["M{:02d}".format(x) for x in range(12)] + ["I01", "I02", "I03"]: + keys = ["Radiance", "Reflectance"] + elif filename[2:5] in ["M{:02d}".format(x) for x in range(12, 17)] + ["I04", "I05"]: + keys = ["Radiance", "BrightnessTemperature"] else: # DNB - keys = ['Radiance'] + keys = ["Radiance"] for k in keys: k = data_var_prefix + "/" + k @@ -175,7 +175,7 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi # SDR files always produce data with 48 scans per granule even if there are less total_rows = DEFAULT_FILE_SHAPE[0] * 48 * num_grans new_shape = (total_rows, DEFAULT_FILE_SHAPE[1]) - is_dnb = filename[:5] not in ['GMODO', 'GIMGO'] + is_dnb = filename[:5] not in ["GMODO", "GIMGO"] if not is_dnb: lon_data = np.linspace(15, 55, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) lat_data = np.linspace(55, 75, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE) @@ -194,12 +194,12 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi file_content[k] = np.repeat([file_content[k]], total_rows, axis=0) file_content[k + "/shape"] = new_shape - angles = ['SolarZenithAngle', - 'SolarAzimuthAngle', - 'SatelliteZenithAngle', - 'SatelliteAzimuthAngle'] + angles = ["SolarZenithAngle", + "SolarAzimuthAngle", + "SatelliteZenithAngle", + "SatelliteAzimuthAngle"] if is_dnb: - angles += ['LunarZenithAngle', 'LunarAzimuthAngle'] + angles += ["LunarZenithAngle", "LunarAzimuthAngle"] for k in angles: k = data_var_prefix + "/" + k file_content[k] = lon_data # close enough to SZA @@ -208,14 +208,14 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi @staticmethod def _add_geo_ref(file_content, filename): - if filename[:3] == 'SVI': - geo_prefix = 'GIMGO' - elif filename[:3] == 'SVM': - geo_prefix = 'GMODO' + if filename[:3] == "SVI": + geo_prefix = "GIMGO" + elif filename[:3] == "SVM": + geo_prefix = "GMODO" else: geo_prefix = None if geo_prefix: - file_content['/attr/N_GEO_Ref'] = geo_prefix + filename[5:] + file_content["/attr/N_GEO_Ref"] = geo_prefix + filename[5:] @staticmethod def _convert_numpy_content_to_dataarray(final_content): @@ -225,7 +225,7 @@ def _convert_numpy_content_to_dataarray(final_content): if isinstance(val, np.ndarray): val = da.from_array(val, chunks=val.shape) if val.ndim > 1: - final_content[key] = DataArray(val, dims=('y', 'x')) + final_content[key] = DataArray(val, dims=("y", "x")) else: final_content[key] = DataArray(val) @@ -234,9 +234,9 @@ def get_test_content(self, filename, filename_info, filetype_info): final_content = {} for dataset in self.datasets: dataset_group = DATASET_KEYS[dataset] - prefix1 = 'Data_Products/{dataset_group}'.format(dataset_group=dataset_group) - prefix2 = '{prefix}/{dataset_group}_Aggr'.format(prefix=prefix1, dataset_group=dataset_group) - prefix3 = 'All_Data/{dataset_group}_All'.format(dataset_group=dataset_group) + prefix1 = "Data_Products/{dataset_group}".format(dataset_group=dataset_group) + prefix2 = "{prefix}/{dataset_group}_Aggr".format(prefix=prefix1, dataset_group=dataset_group) + prefix3 = "All_Data/{dataset_group}_All".format(dataset_group=dataset_group) file_content = {} 
self._add_basic_metadata_to_file_content(file_content, filename_info, self._num_test_granules) @@ -248,10 +248,10 @@ def get_test_content(self, filename, filename_info, filetype_info): for k, v in list(file_content.items()): file_content[k.format(prefix1=prefix1, prefix2=prefix2, prefix3=prefix3)] = v - if filename[:3] in ['SVM', 'SVI', 'SVD']: + if filename[:3] in ["SVM", "SVI", "SVD"]: self._add_data_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) - elif filename[0] == 'G': + elif filename[0] == "G": self._add_geolocation_info_to_file_content(file_content, filename, prefix3, self._num_test_granules) final_content.update(file_content) @@ -271,8 +271,8 @@ def touch_geo_files(*prefixes): def _touch_geo_file(prefix): - geo_fn = prefix + '_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' - open(geo_fn, 'w') + geo_fn = prefix + "_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5" + open(geo_fn, "w") return geo_fn @@ -282,48 +282,48 @@ class TestVIIRSSDRReader(unittest.TestCase): yaml_file = "viirs_sdr.yaml" def _assert_reflectance_properties(self, data_arr, num_scans=16, with_area=True): - self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) - self.assertEqual(data_arr.attrs['calibration'], 'reflectance') - self.assertEqual(data_arr.attrs['units'], '%') - self.assertEqual(data_arr.attrs['rows_per_scan'], num_scans) + assert np.issubdtype(data_arr.dtype, np.float32) + assert data_arr.attrs["calibration"] == "reflectance" + assert data_arr.attrs["units"] == "%" + assert data_arr.attrs["rows_per_scan"] == num_scans if with_area: - self.assertIn('area', data_arr.attrs) - self.assertIsNotNone(data_arr.attrs['area']) - self.assertEqual(data_arr.attrs['area'].shape, data_arr.shape) + assert "area" in data_arr.attrs + assert data_arr.attrs["area"] is not None + assert data_arr.attrs["area"].shape == data_arr.shape else: - self.assertNotIn('area', data_arr.attrs) + assert "area" not in data_arr.attrs def _assert_bt_properties(self, data_arr, num_scans=16, with_area=True): - self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) - self.assertEqual(data_arr.attrs['calibration'], 'brightness_temperature') - self.assertEqual(data_arr.attrs['units'], 'K') - self.assertEqual(data_arr.attrs['rows_per_scan'], num_scans) + assert np.issubdtype(data_arr.dtype, np.float32) + assert data_arr.attrs["calibration"] == "brightness_temperature" + assert data_arr.attrs["units"] == "K" + assert data_arr.attrs["rows_per_scan"] == num_scans if with_area: - self.assertIn('area', data_arr.attrs) - self.assertIsNotNone(data_arr.attrs['area']) - self.assertEqual(data_arr.attrs['area'].shape, data_arr.shape) + assert "area" in data_arr.attrs + assert data_arr.attrs["area"] is not None + assert data_arr.attrs["area"].shape == data_arr.shape else: - self.assertNotIn('area', data_arr.attrs) + assert "area" not in data_arr.attrs def _assert_dnb_radiance_properties(self, data_arr, with_area=True): - self.assertTrue(np.issubdtype(data_arr.dtype, np.float32)) - self.assertEqual(data_arr.attrs['calibration'], 'radiance') - self.assertEqual(data_arr.attrs['units'], 'W m-2 sr-1') - self.assertEqual(data_arr.attrs['rows_per_scan'], 16) + assert np.issubdtype(data_arr.dtype, np.float32) + assert data_arr.attrs["calibration"] == "radiance" + assert data_arr.attrs["units"] == "W m-2 sr-1" + assert data_arr.attrs["rows_per_scan"] == 16 if with_area: - self.assertIn('area', data_arr.attrs) - self.assertIsNotNone(data_arr.attrs['area']) - 
self.assertEqual(data_arr.attrs['area'].shape, data_arr.shape) + assert "area" in data_arr.attrs + assert data_arr.attrs["area"] is not None + assert data_arr.attrs["area"].shape == data_arr.shape else: - self.assertNotIn('area', data_arr.attrs) + assert "area" not in data_arr.attrs def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_atms_sdr_base import JPSS_SDR_FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(JPSS_SDR_FileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(JPSS_SDR_FileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -336,23 +336,21 @@ def test_init(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_init_start_time_is_nodate(self): """Test basic init with start_time being set to the no-date 1/1-1958.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) - with pytest.raises(ValueError) as exec_info: + with pytest.raises(ValueError, match="Datetime invalid 1958-01-01 00:00:00"): _ = r.create_filehandlers([ - 'SVI01_npp_d19580101_t0000000_e0001261_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d19580101_t0000000_e0001261_b01708_c20120226002130255476_noaa_ops.h5", ]) - expected = 'Datetime invalid 1958-01-01 00:00:00' - assert str(exec_info.value) == expected def test_init_start_time_beyond(self): """Test basic init with start_time after the provided files.""" @@ -361,12 +359,12 @@ def test_init_start_time_beyond(self): from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ - 'start_time': datetime(2012, 2, 26) + "start_time": datetime(2012, 2, 26) }) fhs = r.create_filehandlers([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - self.assertEqual(len(fhs), 0) + assert len(fhs) == 0 def test_init_end_time_beyond(self): """Test basic init with end_time before the provided files.""" @@ -375,12 +373,12 @@ def test_init_end_time_beyond(self): from satpy.readers import load_reader r = load_reader(self.reader_configs, filter_parameters={ - 'end_time': datetime(2012, 2, 24) + "end_time": datetime(2012, 2, 24) }) fhs = r.create_filehandlers([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - self.assertEqual(len(fhs), 0) + assert len(fhs) == 0 def test_init_start_end_time(self): """Test basic init with end_time before the provided files.""" @@ -390,48 +388,48 @@ def test_init_start_end_time(self): r = load_reader(self.reader_configs, filter_parameters={ - 'start_time': datetime(2012, 2, 24), - 'end_time': 
datetime(2012, 2, 26) + "start_time": datetime(2012, 2, 24), + "end_time": datetime(2012, 2, 26) }) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers def test_load_all_m_reflectances_no_geo(self): """Load all M band reflectances with no geo files provided.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11', + ds = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11", ]) - self.assertEqual(len(ds), 11) + assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=False) @@ -440,34 +438,34 @@ def test_load_all_m_reflectances_find_geo(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 
'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2): r.create_filehandlers(loadables) - ds = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11', + ds = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11", ]) - self.assertEqual(len(ds), 11) + assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) @@ -476,142 +474,142 @@ def test_load_all_m_reflectances_provided_geo(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + 
"SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2): r.create_filehandlers(loadables) - ds = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11', + ds = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11", ]) - self.assertEqual(len(ds), 11) + assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) - self.assertEqual(d.attrs['area'].lons.min(), 5) - self.assertEqual(d.attrs['area'].lats.min(), 45) - self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 16) - self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 16) + assert d.attrs["area"].lons.min() == 5 + assert d.attrs["area"].lats.min() == 45 + assert d.attrs["area"].lons.attrs["rows_per_scan"] == 16 + assert d.attrs["area"].lats.attrs["rows_per_scan"] == 16 def test_load_all_m_reflectances_use_nontc(self): """Load all M band reflectances but use non-TC geolocation.""" from satpy.readers import load_reader r = load_reader(self.reader_configs, use_tc=False) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + 
"SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2): - r.create_filehandlers(loadables, {'use_tc': False}) - ds = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11', + r.create_filehandlers(loadables, {"use_tc": False}) + ds = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11", ]) - self.assertEqual(len(ds), 11) + assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) - self.assertEqual(d.attrs['area'].lons.min(), 15) - self.assertEqual(d.attrs['area'].lats.min(), 55) - self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 16) - self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 16) + assert d.attrs["area"].lons.min() == 15 + assert d.attrs["area"].lats.min() == 55 + assert d.attrs["area"].lons.attrs["rows_per_scan"] == 16 + assert d.attrs["area"].lats.attrs["rows_per_scan"] == 16 def test_load_all_m_reflectances_use_nontc2(self): """Load all M band reflectances but use non-TC geolocation because TC isn't available.""" from satpy.readers import load_reader r = load_reader(self.reader_configs, use_tc=None) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + 
"SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) with touch_geo_files("GMODO") as (geo_fn2,): - r.create_filehandlers(loadables, {'use_tc': None}) - ds = r.load(['M01', - 'M02', - 'M03', - 'M04', - 'M05', - 'M06', - 'M07', - 'M08', - 'M09', - 'M10', - 'M11', + r.create_filehandlers(loadables, {"use_tc": None}) + ds = r.load(["M01", + "M02", + "M03", + "M04", + "M05", + "M06", + "M07", + "M08", + "M09", + "M10", + "M11", ]) - self.assertEqual(len(ds), 11) + assert len(ds) == 11 for d in ds.values(): self._assert_reflectance_properties(d, with_area=True) - self.assertEqual(d.attrs['area'].lons.min(), 15) - self.assertEqual(d.attrs['area'].lats.min(), 55) - self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 16) - self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 16) + assert d.attrs["area"].lons.min() == 15 + assert d.attrs["area"].lats.min() == 55 + assert d.attrs["area"].lons.attrs["rows_per_scan"] == 16 + assert d.attrs["area"].lats.attrs["rows_per_scan"] == 16 def test_load_all_m_bts(self): """Load all M band brightness temperatures.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['M12', - 'M13', - 'M14', - 'M15', - 'M16', + ds = r.load(["M12", + "M13", + "M14", + "M15", + "M16", ]) - self.assertEqual(len(ds), 5) + assert len(ds) == 5 for d in ds.values(): self._assert_bt_properties(d, with_area=True) @@ -625,22 +623,22 @@ def test_load_dnb_sza_no_factors(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - r.create_filehandlers(loadables, {'include_factors': False}) - ds = r.load(['dnb_solar_zenith_angle', - 'dnb_solar_azimuth_angle', - 'dnb_satellite_zenith_angle', - 'dnb_satellite_azimuth_angle', - 'dnb_lunar_zenith_angle', - 'dnb_lunar_azimuth_angle']) - self.assertEqual(len(ds), 6) + r.create_filehandlers(loadables, {"include_factors": False}) + ds = r.load(["dnb_solar_zenith_angle", + "dnb_solar_azimuth_angle", + "dnb_satellite_zenith_angle", + "dnb_satellite_azimuth_angle", + "dnb_lunar_zenith_angle", + 
"dnb_lunar_azimuth_angle"]) + assert len(ds) == 6 for d in ds.values(): - self.assertTrue(np.issubdtype(d.dtype, np.float32)) - self.assertEqual(d.attrs['units'], 'degrees') - self.assertEqual(d.attrs['rows_per_scan'], 16) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + assert np.issubdtype(d.dtype, np.float32) + assert d.attrs["units"] == "degrees" + assert d.attrs["rows_per_scan"] == 16 + assert "area" in d.attrs + assert d.attrs["area"] is not None def test_load_all_m_radiances(self): """Load all M band radiances.""" @@ -648,73 +646,73 @@ def test_load_all_m_radiances(self): from satpy.tests.utils import make_dsq r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVM01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM06_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM07_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM08_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM09_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM12_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM13_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM14_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM15_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVM16_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + 
"GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load([ - make_dsq(name='M01', calibration='radiance'), - make_dsq(name='M02', calibration='radiance'), - make_dsq(name='M03', calibration='radiance'), - make_dsq(name='M04', calibration='radiance'), - make_dsq(name='M05', calibration='radiance'), - make_dsq(name='M06', calibration='radiance'), - make_dsq(name='M07', calibration='radiance'), - make_dsq(name='M08', calibration='radiance'), - make_dsq(name='M09', calibration='radiance'), - make_dsq(name='M10', calibration='radiance'), - make_dsq(name='M11', calibration='radiance'), - make_dsq(name='M12', calibration='radiance'), - make_dsq(name='M13', calibration='radiance'), - make_dsq(name='M14', calibration='radiance'), - make_dsq(name='M15', calibration='radiance'), - make_dsq(name='M16', calibration='radiance'), + make_dsq(name="M01", calibration="radiance"), + make_dsq(name="M02", calibration="radiance"), + make_dsq(name="M03", calibration="radiance"), + make_dsq(name="M04", calibration="radiance"), + make_dsq(name="M05", calibration="radiance"), + make_dsq(name="M06", calibration="radiance"), + make_dsq(name="M07", calibration="radiance"), + make_dsq(name="M08", calibration="radiance"), + make_dsq(name="M09", calibration="radiance"), + make_dsq(name="M10", calibration="radiance"), + make_dsq(name="M11", calibration="radiance"), + make_dsq(name="M12", calibration="radiance"), + make_dsq(name="M13", calibration="radiance"), + make_dsq(name="M14", calibration="radiance"), + make_dsq(name="M15", calibration="radiance"), + make_dsq(name="M16", calibration="radiance"), ]) - self.assertEqual(len(ds), 16) + assert len(ds) == 16 for d in ds.values(): - self.assertTrue(np.issubdtype(d.dtype, np.float32)) - self.assertEqual(d.attrs['calibration'], 'radiance') - self.assertEqual(d.attrs['units'], 'W m-2 um-1 sr-1') - self.assertEqual(d.attrs['rows_per_scan'], 16) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + assert np.issubdtype(d.dtype, np.float32) + assert d.attrs["calibration"] == "radiance" + assert d.attrs["units"] == "W m-2 um-1 sr-1" + assert d.attrs["rows_per_scan"] == 16 + assert "area" in d.attrs + assert d.attrs["area"] is not None def test_load_dnb(self): """Load DNB dataset.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['DNB']) - self.assertEqual(len(ds), 1) + ds = r.load(["DNB"]) + assert len(ds) == 1 for d in ds.values(): data = d.values # default scale factors are 2 and offset 1 # multiply DNB by 10000 should mean the first value of 0 should be: # data * factor * 10000 + offset * 10000 # 0 * 2 * 10000 + 1 * 10000 => 10000 - self.assertEqual(data[0, 0], 10000) + assert data[0, 0] == 10000 # the second value of 1 should be: # 1 * 2 * 10000 + 1 * 10000 => 30000 - self.assertEqual(data[0, 1], 30000) + assert data[0, 1] == 30000 self._assert_dnb_radiance_properties(d, with_area=True) def test_load_dnb_no_factors(self): @@ -722,22 +720,22 @@ def test_load_dnb_no_factors(self): from satpy.readers import load_reader r = 
load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) - r.create_filehandlers(loadables, {'include_factors': False}) - ds = r.load(['DNB']) - self.assertEqual(len(ds), 1) + r.create_filehandlers(loadables, {"include_factors": False}) + ds = r.load(["DNB"]) + assert len(ds) == 1 for d in ds.values(): data = d.values # no scale factors, default factor 1 and offset 0 # multiply DNB by 10000 should mean the first value of 0 should be: # data * factor * 10000 + offset * 10000 # 0 * 1 * 10000 + 0 * 10000 => 0 - self.assertEqual(data[0, 0], 0) + assert data[0, 0] == 0 # the second value of 1 should be: # 1 * 1 * 10000 + 0 * 10000 => 10000 - self.assertEqual(data[0, 1], 10000) + assert data[0, 1] == 10000 self._assert_dnb_radiance_properties(d, with_area=True) def test_load_i_no_files(self): @@ -745,51 +743,51 @@ def test_load_i_no_files(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVDNB_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GDNBO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - self.assertNotIn('I01', [x['name'] for x in r.available_dataset_ids]) - ds = r.load(['I01']) - self.assertEqual(len(ds), 0) + assert "I01" not in [x["name"] for x in r.available_dataset_ids] + ds = r.load(["I01"]) + assert len(ds) == 0 def test_load_all_i_reflectances_provided_geo(self): """Load all I band reflectances with geo files provided.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['I01', - 'I02', - 'I03', + ds = r.load(["I01", + "I02", + "I03", ]) - self.assertEqual(len(ds), 3) + assert len(ds) == 3 for d in ds.values(): self._assert_reflectance_properties(d, num_scans=32) - self.assertEqual(d.attrs['area'].lons.min(), 5) - self.assertEqual(d.attrs['area'].lats.min(), 45) - self.assertEqual(d.attrs['area'].lons.attrs['rows_per_scan'], 32) - self.assertEqual(d.attrs['area'].lats.attrs['rows_per_scan'], 32) + assert d.attrs["area"].lons.min() == 5 + assert d.attrs["area"].lats.min() == 45 + assert d.attrs["area"].lons.attrs["rows_per_scan"] == 32 + assert d.attrs["area"].lats.attrs["rows_per_scan"] == 32 def 
test_load_all_i_bts(self): """Load all I band brightness temperatures.""" from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) - ds = r.load(['I04', - 'I05', + ds = r.load(["I04", + "I05", ]) - self.assertEqual(len(ds), 2) + assert len(ds) == 2 for d in ds.values(): self._assert_bt_properties(d, num_scans=32) @@ -799,29 +797,29 @@ def test_load_all_i_radiances(self): from satpy.tests.utils import make_dsq r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', - 'GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI02_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI03_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI04_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "SVI05_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", + "GITCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load([ - make_dsq(name='I01', calibration='radiance'), - make_dsq(name='I02', calibration='radiance'), - make_dsq(name='I03', calibration='radiance'), - make_dsq(name='I04', calibration='radiance'), - make_dsq(name='I05', calibration='radiance'), + make_dsq(name="I01", calibration="radiance"), + make_dsq(name="I02", calibration="radiance"), + make_dsq(name="I03", calibration="radiance"), + make_dsq(name="I04", calibration="radiance"), + make_dsq(name="I05", calibration="radiance"), ]) - self.assertEqual(len(ds), 5) + assert len(ds) == 5 for d in ds.values(): - self.assertTrue(np.issubdtype(d.dtype, np.float32)) - self.assertEqual(d.attrs['calibration'], 'radiance') - self.assertEqual(d.attrs['units'], 'W m-2 um-1 sr-1') - self.assertEqual(d.attrs['rows_per_scan'], 32) - self.assertIn('area', d.attrs) - self.assertIsNotNone(d.attrs['area']) + assert np.issubdtype(d.dtype, np.float32) + assert d.attrs["calibration"] == "radiance" + assert d.attrs["units"] == "W m-2 um-1 sr-1" + assert d.attrs["rows_per_scan"] == 32 + assert "area" in d.attrs + assert d.attrs["area"] is not None class FakeHDF5FileHandlerAggr(FakeHDF5FileHandler2): @@ -840,9 +838,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_sdr import VIIRSSDRFileHandler - self.reader_configs = 
config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandlerAggr,)) + self.p = mock.patch.object(VIIRSSDRFileHandler, "__bases__", (FakeHDF5FileHandlerAggr,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -855,7 +853,7 @@ def test_bounding_box(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) # make sure we have some files @@ -869,7 +867,7 @@ def test_bounding_box(self): 65.545685, 63.103046, 61.853905, 55.169273, 57.062447, 58.77254, 60.465942, 62.036346, 63.465122, 64.72178, 65.78417, 66.66166, 79.00025 ] - lons, lats = r.file_handlers['generic_file'][0].get_bounding_box() + lons, lats = r.file_handlers["generic_file"][0].get_bounding_box() np.testing.assert_allclose(lons, expected_lons) np.testing.assert_allclose(lats, expected_lats) @@ -890,9 +888,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_atms_sdr_base import JPSS_SDR_FileHandler - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(JPSS_SDR_FileHandler, '__bases__', (FakeShortHDF5FileHandlerAggr,)) + self.p = mock.patch.object(JPSS_SDR_FileHandler, "__bases__", (FakeShortHDF5FileHandlerAggr,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -905,11 +903,11 @@ def test_load_truncated_band(self): from satpy.readers import load_reader r = load_reader(self.reader_configs) loadables = r.select_files_from_pathnames([ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) r.create_filehandlers(loadables) ds = r.load(["I01"]) - self.assertEqual(len(ds), 1) + assert len(ds) == 1 i01_data = ds["I01"].compute() expected_rows = sum(FakeShortHDF5FileHandlerAggr._num_scans_per_gran) * DEFAULT_FILE_SHAPE[0] - self.assertEqual(i01_data.shape, (expected_rows, 300)) + assert i01_data.shape == (expected_rows, 300) diff --git a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py index 2f926a0e47..49206962e5 100644 --- a/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py +++ b/satpy/tests/reader_tests/test_viirs_vgac_l1c_nc.py @@ -29,36 +29,36 @@ from netCDF4 import Dataset -@pytest.fixture -def _nc_filename(tmp_path): +@pytest.fixture() +def nc_filename(tmp_path): now = datetime.datetime.utcnow() - filename = f'VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc' + filename = f"VGAC_VJ10XMOD_A{now:%Y%j_%H%M}_n004946_K005.nc" filename_str = str(tmp_path / filename) # Create test data - with Dataset(filename_str, 'w') as nc: + with Dataset(filename_str, "w") as nc: nscn = 7 npix = 800 n_lut = 12000 - nc.createDimension('npix', npix) - nc.createDimension('nscn', nscn) - 
nc.createDimension('n_lut', n_lut) + nc.createDimension("npix", npix) + nc.createDimension("nscn", nscn) + nc.createDimension("n_lut", n_lut) nc.StartTime = "2023-03-28T09:08:07" nc.EndTime = "2023-03-28T10:11:12" for ind in range(1, 11, 1): ch_name = "M{:02d}".format(ind) - r_a = nc.createVariable(ch_name, np.int16, dimensions=('nscn', 'npix')) + r_a = nc.createVariable(ch_name, np.int16, dimensions=("nscn", "npix")) r_a[:] = np.ones((nscn, npix)) * 10 - attrs = {'scale_factor': 0.1, 'units': 'percent'} + attrs = {"scale_factor": 0.1, "units": "percent"} for attr in attrs: setattr(r_a, attr, attrs[attr]) for ind in range(12, 17, 1): ch_name = "M{:02d}".format(ind) - tb_b = nc.createVariable(ch_name, np.int16, dimensions=('nscn', 'npix')) + tb_b = nc.createVariable(ch_name, np.int16, dimensions=("nscn", "npix")) tb_b[:] = np.ones((nscn, npix)) * 800 - attrs = {'units': 'radiances', 'scale_factor': 0.002} + attrs = {"units": "radiances", "scale_factor": 0.002} for attr in attrs: setattr(tb_b, attr, attrs[attr]) - tb_lut = nc.createVariable(ch_name + "_LUT", np.float32, dimensions=('n_lut')) + tb_lut = nc.createVariable(ch_name + "_LUT", np.float32, dimensions=("n_lut")) tb_lut[:] = np.array(range(0, n_lut)) * 0.5 return filename_str @@ -66,14 +66,14 @@ def _nc_filename(tmp_path): class TestVGACREader: """Test the VGACFileHandler reader.""" - def test_read_vgac(self, _nc_filename): + def test_read_vgac(self, nc_filename): """Test reading reflectances and BT.""" from satpy.scene import Scene # Read data scn_ = Scene( - reader='viirs_vgac_l1c_nc', - filenames=[_nc_filename]) + reader="viirs_vgac_l1c_nc", + filenames=[nc_filename]) scn_.load(["M05", "M15"]) assert (scn_["M05"][0, 0] == 100) assert (scn_["M15"][0, 0] == 400) diff --git a/satpy/tests/reader_tests/test_virr_l1b.py b/satpy/tests/reader_tests/test_virr_l1b.py index a7a76cafb3..e3fbd73272 100644 --- a/satpy/tests/reader_tests/test_virr_l1b.py +++ b/satpy/tests/reader_tests/test_virr_l1b.py @@ -39,48 +39,48 @@ def _make_file(self, platform_id, geolocation_prefix, l1b_prefix, ECWN, Emissive dim_1 = 20 test_file = { # Satellite data. - '/attr/Day Or Night Flag': 'D', '/attr/Observing Beginning Date': '2018-12-25', - '/attr/Observing Beginning Time': '21:41:47.090', '/attr/Observing Ending Date': '2018-12-25', - '/attr/Observing Ending Time': '21:47:28.254', '/attr/Satellite Name': platform_id, - '/attr/Sensor Identification Code': 'VIRR', + "/attr/Day Or Night Flag": "D", "/attr/Observing Beginning Date": "2018-12-25", + "/attr/Observing Beginning Time": "21:41:47.090", "/attr/Observing Ending Date": "2018-12-25", + "/attr/Observing Ending Time": "21:47:28.254", "/attr/Satellite Name": platform_id, + "/attr/Sensor Identification Code": "VIRR", # Emissive data. 
- l1b_prefix + 'EV_Emissive': self.make_test_data([3, dim_0, dim_1]), - l1b_prefix + 'EV_Emissive/attr/valid_range': [0, 50000], - l1b_prefix + 'Emissive_Radiance_Scales': self.make_test_data([dim_0, dim_1]), - l1b_prefix + 'EV_Emissive/attr/units': Emissive_units, - l1b_prefix + 'Emissive_Radiance_Offsets': self.make_test_data([dim_0, dim_1]), - '/attr/' + ECWN: [2610.31, 917.6268, 836.2546], + l1b_prefix + "EV_Emissive": self.make_test_data([3, dim_0, dim_1]), + l1b_prefix + "EV_Emissive/attr/valid_range": [0, 50000], + l1b_prefix + "Emissive_Radiance_Scales": self.make_test_data([dim_0, dim_1]), + l1b_prefix + "EV_Emissive/attr/units": Emissive_units, + l1b_prefix + "Emissive_Radiance_Offsets": self.make_test_data([dim_0, dim_1]), + "/attr/" + ECWN: [2610.31, 917.6268, 836.2546], # Reflectance data. - l1b_prefix + 'EV_RefSB': self.make_test_data([7, dim_0, dim_1]), - l1b_prefix + 'EV_RefSB/attr/valid_range': [0, 32767], l1b_prefix + 'EV_RefSB/attr/units': 'none', - '/attr/RefSB_Cal_Coefficients': np.ones(14, dtype=np.float32) * 2 + l1b_prefix + "EV_RefSB": self.make_test_data([7, dim_0, dim_1]), + l1b_prefix + "EV_RefSB/attr/valid_range": [0, 32767], l1b_prefix + "EV_RefSB/attr/units": "none", + "/attr/RefSB_Cal_Coefficients": np.ones(14, dtype=np.float32) * 2 } - for attribute in ['Latitude', 'Longitude', geolocation_prefix + 'SolarZenith', - geolocation_prefix + 'SensorZenith', geolocation_prefix + 'SolarAzimuth', - geolocation_prefix + 'SensorAzimuth']: + for attribute in ["Latitude", "Longitude", geolocation_prefix + "SolarZenith", + geolocation_prefix + "SensorZenith", geolocation_prefix + "SolarAzimuth", + geolocation_prefix + "SensorAzimuth"]: test_file[attribute] = self.make_test_data([dim_0, dim_1]) - test_file[attribute + '/attr/Intercept'] = 0. - test_file[attribute + '/attr/units'] = 'degrees' - if 'Solar' in attribute or 'Sensor' in attribute: - test_file[attribute + '/attr/Slope'] = .01 - if 'Azimuth' in attribute: - test_file[attribute + '/attr/valid_range'] = [0, 18000] + test_file[attribute + "/attr/Intercept"] = 0. + test_file[attribute + "/attr/units"] = "degrees" + if "Solar" in attribute or "Sensor" in attribute: + test_file[attribute + "/attr/Slope"] = .01 + if "Azimuth" in attribute: + test_file[attribute + "/attr/valid_range"] = [0, 18000] else: - test_file[attribute + '/attr/valid_range'] = [-18000, 18000] + test_file[attribute + "/attr/valid_range"] = [-18000, 18000] else: - test_file[attribute + '/attr/Slope'] = 1. - if 'Longitude' == attribute: - test_file[attribute + '/attr/valid_range'] = [-180., 180.] + test_file[attribute + "/attr/Slope"] = 1. + if "Longitude" == attribute: + test_file[attribute + "/attr/valid_range"] = [-180., 180.] else: - test_file[attribute + '/attr/valid_range'] = [-90., 90.] + test_file[attribute + "/attr/valid_range"] = [-90., 90.] 
return test_file def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" - if filename_info['platform_id'] == 'FY3B': - return self._make_file('FY3B', '', '', 'Emmisive_Centroid_Wave_Number', 'milliWstts/m^2/cm^(-1)/steradian') - return self._make_file(filename_info['platform_id'], 'Geolocation/', 'Data/', - 'Emissive_Centroid_Wave_Number', 'none') + if filename_info["platform_id"] == "FY3B": + return self._make_file("FY3B", "", "", "Emmisive_Centroid_Wave_Number", "milliWstts/m^2/cm^(-1)/steradian") + return self._make_file(filename_info["platform_id"], "Geolocation/", "Data/", + "Emissive_Centroid_Wave_Number", "none") class TestVIRRL1BReader(unittest.TestCase): @@ -92,9 +92,9 @@ def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.virr_l1b import VIRR_L1B - self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIRR_L1B, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(VIRR_L1B, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -104,84 +104,82 @@ def tearDown(self): def _band_helper(self, attributes, units, calibration, standard_name, file_type, band_index_size, resolution): - self.assertEqual(units, attributes['units']) - self.assertEqual(calibration, attributes['calibration']) - self.assertEqual(standard_name, attributes['standard_name']) - self.assertEqual(file_type, attributes['file_type']) - self.assertTrue(attributes['band_index'] in range(band_index_size)) - self.assertEqual(resolution, attributes['resolution']) - self.assertEqual(('longitude', 'latitude'), attributes['coordinates']) + assert units == attributes["units"] + assert calibration == attributes["calibration"] + assert standard_name == attributes["standard_name"] + assert file_type == attributes["file_type"] + assert attributes["band_index"] in range(band_index_size) + assert resolution == attributes["resolution"] + assert ("longitude", "latitude") == attributes["coordinates"] def _fy3_helper(self, platform_name, reader, Emissive_units): """Load channels and test accurate metadata.""" import datetime - band_values = {'1': 22.0, '2': 22.0, '6': 22.0, '7': 22.0, '8': 22.0, '9': 22.0, '10': 22.0, - '3': 496.542155, '4': 297.444511, '5': 288.956557, 'solar_zenith_angle': .1, - 'satellite_zenith_angle': .1, 'solar_azimuth_angle': .1, 'satellite_azimuth_angle': .1, - 'longitude': 10} - if platform_name == 'FY3B': + band_values = {"1": 22.0, "2": 22.0, "6": 22.0, "7": 22.0, "8": 22.0, "9": 22.0, "10": 22.0, + "3": 496.542155, "4": 297.444511, "5": 288.956557, "solar_zenith_angle": .1, + "satellite_zenith_angle": .1, "solar_azimuth_angle": .1, "satellite_azimuth_angle": .1, + "longitude": 10} + if platform_name == "FY3B": # updated 2015 coefficients - band_values['1'] = -0.168 - band_values['2'] = -0.2706 - band_values['6'] = -1.5631 - band_values['7'] = -0.2114 - band_values['8'] = -0.171 - band_values['9'] = -0.1606 - band_values['10'] = -0.1328 + band_values["1"] = -0.168 + band_values["2"] = -0.2706 + band_values["6"] = -1.5631 + band_values["7"] = -0.2114 + band_values["8"] = -0.171 + band_values["9"] = -0.1606 + band_values["10"] = -0.1328 datasets = reader.load([band for band in 
band_values]) for dataset in datasets: # Object returned by get_dataset. - ds = datasets[dataset['name']] + ds = datasets[dataset["name"]] attributes = ds.attrs - self.assertTrue(isinstance(ds.data, da.Array)) - self.assertEqual('virr', attributes['sensor']) - self.assertEqual(platform_name, attributes['platform_name']) - self.assertEqual(datetime.datetime(2018, 12, 25, 21, 41, 47, 90000), attributes['start_time']) - self.assertEqual(datetime.datetime(2018, 12, 25, 21, 47, 28, 254000), attributes['end_time']) - self.assertEqual((19, 20), datasets[dataset['name']].shape) - self.assertEqual(('y', 'x'), datasets[dataset['name']].dims) - if dataset['name'] in ['1', '2', '6', '7', '8', '9', '10']: - self._band_helper(attributes, '%', 'reflectance', - 'toa_bidirectional_reflectance', 'virr_l1b', + assert isinstance(ds.data, da.Array) + assert "virr" == attributes["sensor"] + assert platform_name == attributes["platform_name"] + assert datetime.datetime(2018, 12, 25, 21, 41, 47, 90000) == attributes["start_time"] + assert datetime.datetime(2018, 12, 25, 21, 47, 28, 254000) == attributes["end_time"] + assert (19, 20) == datasets[dataset["name"]].shape + assert ("y", "x") == datasets[dataset["name"]].dims + if dataset["name"] in ["1", "2", "6", "7", "8", "9", "10"]: + self._band_helper(attributes, "%", "reflectance", + "toa_bidirectional_reflectance", "virr_l1b", 7, 1000) - elif dataset['name'] in ['3', '4', '5']: - self._band_helper(attributes, Emissive_units, 'brightness_temperature', - 'toa_brightness_temperature', 'virr_l1b', 3, 1000) - elif dataset['name'] in ['longitude', 'latitude']: - self.assertEqual('degrees', attributes['units']) - self.assertTrue(attributes['standard_name'] in ['longitude', 'latitude']) - self.assertEqual(['virr_l1b', 'virr_geoxx'], attributes['file_type']) - self.assertEqual(1000, attributes['resolution']) + elif dataset["name"] in ["3", "4", "5"]: + self._band_helper(attributes, Emissive_units, "brightness_temperature", + "toa_brightness_temperature", "virr_l1b", 3, 1000) + elif dataset["name"] in ["longitude", "latitude"]: + assert "degrees" == attributes["units"] + assert attributes["standard_name"] in ["longitude", "latitude"] + assert ["virr_l1b", "virr_geoxx"] == attributes["file_type"] + assert 1000 == attributes["resolution"] else: - self.assertEqual('degrees', attributes['units']) - self.assertTrue( - attributes['standard_name'] in ['solar_zenith_angle', 'sensor_zenith_angle', 'solar_azimuth_angle', - 'sensor_azimuth_angle']) - self.assertEqual(['virr_geoxx', 'virr_l1b'], attributes['file_type']) - self.assertEqual(('longitude', 'latitude'), attributes['coordinates']) - self.assertEqual(band_values[dataset['name']], - round(float(np.array(ds[ds.shape[0] // 2][ds.shape[1] // 2])), 6)) + assert "degrees" == attributes["units"] + assert attributes["standard_name"] in ["solar_zenith_angle", "sensor_zenith_angle", + "solar_azimuth_angle", "sensor_azimuth_angle"] + assert ["virr_geoxx", "virr_l1b"] == attributes["file_type"] + assert ("longitude", "latitude") == attributes["coordinates"] + assert band_values[dataset["name"]] == round(float(np.array(ds[ds.shape[0] // 2][ds.shape[1] // 2])), 6) assert "valid_range" not in ds.attrs def test_fy3b_file(self): """Test that FY3B files are recognized.""" from satpy.readers import load_reader FY3B_reader = load_reader(self.reader_configs) - FY3B_file = FY3B_reader.select_files_from_pathnames(['tf2018359214943.FY3B-L_VIRRX_L1B.HDF']) - self.assertEqual(1, len(FY3B_file)) + FY3B_file = 
FY3B_reader.select_files_from_pathnames(["tf2018359214943.FY3B-L_VIRRX_L1B.HDF"]) + assert 1 == len(FY3B_file) FY3B_reader.create_filehandlers(FY3B_file) # Make sure we have some files - self.assertTrue(FY3B_reader.file_handlers) - self._fy3_helper('FY3B', FY3B_reader, 'milliWstts/m^2/cm^(-1)/steradian') + assert FY3B_reader.file_handlers + self._fy3_helper("FY3B", FY3B_reader, "milliWstts/m^2/cm^(-1)/steradian") def test_fy3c_file(self): """Test that FY3C files are recognized.""" from satpy.readers import load_reader FY3C_reader = load_reader(self.reader_configs) - FY3C_files = FY3C_reader.select_files_from_pathnames(['tf2018359143912.FY3C-L_VIRRX_GEOXX.HDF', - 'tf2018359143912.FY3C-L_VIRRX_L1B.HDF']) - self.assertEqual(2, len(FY3C_files)) + FY3C_files = FY3C_reader.select_files_from_pathnames(["tf2018359143912.FY3C-L_VIRRX_GEOXX.HDF", + "tf2018359143912.FY3C-L_VIRRX_L1B.HDF"]) + assert 2 == len(FY3C_files) FY3C_reader.create_filehandlers(FY3C_files) # Make sure we have some files - self.assertTrue(FY3C_reader.file_handlers) - self._fy3_helper('FY3C', FY3C_reader, '1') + assert FY3C_reader.file_handlers + self._fy3_helper("FY3C", FY3C_reader, "1") diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index c62ffcea1d..a886c3fa60 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -35,8 +35,8 @@ def test_serialization_with_readers_and_data_arr(self): """Test that dask can serialize a Scene with readers.""" from distributed.protocol import deserialize, serialize - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) cloned_scene = deserialize(*serialize(scene)) assert scene._readers.keys() == cloned_scene._readers.keys() assert scene.all_dataset_ids == scene.all_dataset_ids @@ -57,12 +57,12 @@ def test_geoviews_basic_with_area(self): """Test converting a Scene to geoviews with an AreaDefinition.""" from pyresample.geometry import AreaDefinition scn = Scene() - area = AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + area = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) - scn['ds1'] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), - 'area': area}) + scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), + "area": area}) gv_obj = scn.to_geoviews() # we assume that if we got something back, geoviews can use it assert gv_obj is not None @@ -74,9 +74,9 @@ def test_geoviews_basic_with_swath(self): lons = xr.DataArray(da.zeros((2, 2))) lats = xr.DataArray(da.zeros((2, 2))) area = SwathDefinition(lons, lats) - scn['ds1'] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), - 'area': area}) + scn["ds1"] = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), + "area": area}) gv_obj = scn.to_geoviews() # we assume that if we got something back, geoviews can use it assert gv_obj is not None @@ -93,42 +93,42 @@ def test_with_empty_scene(self): assert len(ds.variables) == 0 assert len(ds.coords) == 0 - @pytest.fixture + @pytest.fixture() def single_area_scn(self): """Define Scene with single area.""" from pyresample.geometry import AreaDefinition - area = 
AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + area = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) data_array = xr.DataArray(da.zeros((2, 2), chunks=-1), - dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), 'area': area}) + dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), "area": area}) scn = Scene() - scn['var1'] = data_array + scn["var1"] = data_array return scn - @pytest.fixture + @pytest.fixture() def multi_area_scn(self): """Define Scene with multiple area.""" from pyresample.geometry import AreaDefinition - area1 = AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + area1 = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 2, 2, [-200, -200, 200, 200]) - area2 = AreaDefinition('test', 'test', 'test', - {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0}, + area2 = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, 4, 4, [-200, -200, 200, 200]) data_array1 = xr.DataArray(da.zeros((2, 2), chunks=-1), - dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), 'area': area1}) + dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), "area": area1}) data_array2 = xr.DataArray(da.zeros((4, 4), chunks=-1), - dims=('y', 'x'), - attrs={'start_time': datetime(2018, 1, 1), 'area': area2}) + dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), "area": area2}) scn = Scene() - scn['var1'] = data_array1 - scn['var2'] = data_array2 + scn["var1"] = data_array1 + scn["var2"] = data_array2 return scn def test_with_single_area_scene_type(self, single_area_scn): @@ -162,5 +162,5 @@ def test_wrong_dataset_key(self, single_area_scn): def test_to_xarray_with_multiple_area_scene(self, multi_area_scn): """Test converting muiltple area Scene to xarray.""" # TODO: in future adapt for DataTree implementation - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Datasets to be saved .* must have identical projection coordinates."): _ = multi_area_scn.to_xarray() diff --git a/satpy/tests/scene_tests/test_data_access.py b/satpy/tests/scene_tests/test_data_access.py index f345679e03..66129ad8bb 100644 --- a/satpy/tests/scene_tests/test_data_access.py +++ b/satpy/tests/scene_tests/test_data_access.py @@ -83,21 +83,22 @@ def test_iter_by_area_swath(self): from pyresample.geometry import SwathDefinition scene = Scene() sd = SwathDefinition(lons=np.arange(5), lats=np.arange(5)) - scene["1"] = xr.DataArray(np.arange(5), attrs={'area': sd}) - scene["2"] = xr.DataArray(np.arange(5), attrs={'area': sd}) + scene["1"] = xr.DataArray(np.arange(5), attrs={"area": sd}) + scene["2"] = xr.DataArray(np.arange(5), attrs={"area": sd}) scene["3"] = xr.DataArray(np.arange(5)) for area_obj, ds_list in scene.iter_by_area(): - ds_list_names = set(ds['name'] for ds in ds_list) + ds_list_names = set(ds["name"] for ds in ds_list) if area_obj is sd: - assert ds_list_names == {'1', '2'} + assert ds_list_names == {"1", "2"} else: assert area_obj is None - assert ds_list_names == {'3'} + assert ds_list_names == {"3"} def test_bad_setitem(self): """Test setting an item wrongly.""" scene = Scene() - pytest.raises(ValueError, scene.__setitem__, '1', np.arange(5)) + with pytest.raises(ValueError, match="Key must be a DataID when value is not an xarray DataArray or dict"): + scene.__setitem__("1", np.arange(5)) def test_setitem(self): """Test 
setting an item.""" @@ -108,16 +109,16 @@ def test_setitem(self): assert set(scene._datasets.keys()) == {expected_id} assert set(scene._wishlist) == {expected_id} - did = make_dataid(name='oranges') + did = make_dataid(name="oranges") scene[did] = ds1 - assert 'oranges' in scene + assert "oranges" in scene nparray = np.arange(5*5).reshape(5, 5) - with pytest.raises(ValueError): - scene['apples'] = nparray - assert 'apples' not in scene - did = make_dataid(name='apples') + with pytest.raises(ValueError, match="Key must be a DataID when value is not an xarray DataArray or dict"): + scene["apples"] = nparray + assert "apples" not in scene + did = make_dataid(name="apples") scene[did] = nparray - assert 'apples' in scene + assert "apples" in scene def test_getitem(self): """Test __getitem__ with names only.""" @@ -125,41 +126,41 @@ def test_getitem(self): scene["1"] = ds1 = xr.DataArray(np.arange(5)) scene["2"] = ds2 = xr.DataArray(np.arange(5)) scene["3"] = ds3 = xr.DataArray(np.arange(5)) - assert scene['1'] is ds1 - assert scene['2'] is ds2 - assert scene['3'] is ds3 - pytest.raises(KeyError, scene.__getitem__, '4') - assert scene.get('3') is ds3 - assert scene.get('4') is None + assert scene["1"] is ds1 + assert scene["2"] is ds2 + assert scene["3"] is ds3 + pytest.raises(KeyError, scene.__getitem__, "4") + assert scene.get("3") is ds3 + assert scene.get("4") is None def test_getitem_modifiers(self): """Test __getitem__ with names and modifiers.""" # Return least modified item scene = Scene() - scene['1'] = ds1_m0 = xr.DataArray(np.arange(5)) - scene[make_dataid(name='1', modifiers=('mod1',)) + scene["1"] = ds1_m0 = xr.DataArray(np.arange(5)) + scene[make_dataid(name="1", modifiers=("mod1",)) ] = xr.DataArray(np.arange(5)) - assert scene['1'] is ds1_m0 + assert scene["1"] is ds1_m0 assert len(list(scene.keys())) == 2 scene = Scene() - scene['1'] = ds1_m0 = xr.DataArray(np.arange(5)) - scene[make_dataid(name='1', modifiers=('mod1',)) + scene["1"] = ds1_m0 = xr.DataArray(np.arange(5)) + scene[make_dataid(name="1", modifiers=("mod1",)) ] = xr.DataArray(np.arange(5)) - scene[make_dataid(name='1', modifiers=('mod1', 'mod2')) + scene[make_dataid(name="1", modifiers=("mod1", "mod2")) ] = xr.DataArray(np.arange(5)) - assert scene['1'] is ds1_m0 + assert scene["1"] is ds1_m0 assert len(list(scene.keys())) == 3 scene = Scene() - scene[make_dataid(name='1', modifiers=('mod1', 'mod2')) + scene[make_dataid(name="1", modifiers=("mod1", "mod2")) ] = ds1_m2 = xr.DataArray(np.arange(5)) - scene[make_dataid(name='1', modifiers=('mod1',)) + scene[make_dataid(name="1", modifiers=("mod1",)) ] = ds1_m1 = xr.DataArray(np.arange(5)) - assert scene['1'] is ds1_m1 - assert scene[make_dataid(name='1', modifiers=('mod1', 'mod2'))] is ds1_m2 + assert scene["1"] is ds1_m1 + assert scene[make_dataid(name="1", modifiers=("mod1", "mod2"))] is ds1_m2 pytest.raises(KeyError, scene.__getitem__, - make_dataid(name='1', modifiers=tuple())) + make_dataid(name="1", modifiers=tuple())) assert len(list(scene.keys())) == 2 def test_getitem_slices(self): @@ -168,13 +169,13 @@ def test_getitem_slices(self): from pyresample.utils import proj4_str_to_dict scene1 = Scene() scene2 = Scene() - proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 ' - '+units=m +no_defs') + proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. 
+lat_0=25 +lat_1=25 " + "+units=m +no_defs") area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, 200, 400, @@ -184,80 +185,80 @@ def test_getitem_slices(self): lats=np.zeros((5, 10))) scene1["1"] = scene2["1"] = xr.DataArray(np.zeros((5, 10))) scene1["2"] = scene2["2"] = xr.DataArray(np.zeros((5, 10)), - dims=('y', 'x')) - scene1["3"] = xr.DataArray(np.zeros((5, 10)), dims=('y', 'x'), - attrs={'area': area_def}) + dims=("y", "x")) + scene1["3"] = xr.DataArray(np.zeros((5, 10)), dims=("y", "x"), + attrs={"area": area_def}) anc_vars = [xr.DataArray(np.ones((5, 10)), - attrs={'name': 'anc_var', 'area': area_def})] - attrs = {'ancillary_variables': anc_vars, 'area': area_def} + attrs={"name": "anc_var", "area": area_def})] + attrs = {"ancillary_variables": anc_vars, "area": area_def} scene1["3a"] = xr.DataArray(np.zeros((5, 10)), - dims=('y', 'x'), + dims=("y", "x"), attrs=attrs) - scene2["4"] = xr.DataArray(np.zeros((5, 10)), dims=('y', 'x'), - attrs={'area': swath_def}) + scene2["4"] = xr.DataArray(np.zeros((5, 10)), dims=("y", "x"), + attrs={"area": swath_def}) anc_vars = [xr.DataArray(np.ones((5, 10)), - attrs={'name': 'anc_var', 'area': swath_def})] - attrs = {'ancillary_variables': anc_vars, 'area': swath_def} + attrs={"name": "anc_var", "area": swath_def})] + attrs = {"ancillary_variables": anc_vars, "area": swath_def} scene2["4a"] = xr.DataArray(np.zeros((5, 10)), - dims=('y', 'x'), + dims=("y", "x"), attrs=attrs) new_scn1 = scene1[2:5, 2:8] new_scn2 = scene2[2:5, 2:8] for new_scn in [new_scn1, new_scn2]: # datasets without an area don't get sliced - assert new_scn['1'].shape == (5, 10) - assert new_scn['2'].shape == (5, 10) - - assert new_scn1['3'].shape == (3, 6) - assert 'area' in new_scn1['3'].attrs - assert new_scn1['3'].attrs['area'].shape == (3, 6) - assert new_scn1['3a'].shape == (3, 6) - a_var = new_scn1['3a'].attrs['ancillary_variables'][0] + assert new_scn["1"].shape == (5, 10) + assert new_scn["2"].shape == (5, 10) + + assert new_scn1["3"].shape == (3, 6) + assert "area" in new_scn1["3"].attrs + assert new_scn1["3"].attrs["area"].shape == (3, 6) + assert new_scn1["3a"].shape == (3, 6) + a_var = new_scn1["3a"].attrs["ancillary_variables"][0] assert a_var.shape == (3, 6) - assert new_scn2['4'].shape == (3, 6) - assert 'area' in new_scn2['4'].attrs - assert new_scn2['4'].attrs['area'].shape == (3, 6) - assert new_scn2['4a'].shape == (3, 6) - a_var = new_scn2['4a'].attrs['ancillary_variables'][0] + assert new_scn2["4"].shape == (3, 6) + assert "area" in new_scn2["4"].attrs + assert new_scn2["4"].attrs["area"].shape == (3, 6) + assert new_scn2["4a"].shape == (3, 6) + a_var = new_scn2["4a"].attrs["ancillary_variables"][0] assert a_var.shape == (3, 6) def test_contains(self): """Test contains.""" scene = Scene() scene["1"] = xr.DataArray(np.arange(5), - attrs={'wavelength': (0.1, 0.2, 0.3), - '_satpy_id_keys': default_id_keys_config}) - assert '1' in scene + attrs={"wavelength": (0.1, 0.2, 0.3), + "_satpy_id_keys": default_id_keys_config}) + assert "1" in scene assert 0.15 in scene - assert '2' not in scene + assert "2" not in scene assert 0.31 not in scene scene = Scene() - scene['blueberry'] = xr.DataArray(np.arange(5)) - scene['blackberry'] = xr.DataArray(np.arange(5)) - scene['strawberry'] = xr.DataArray(np.arange(5)) - scene['raspberry'] = xr.DataArray(np.arange(5)) + scene["blueberry"] = xr.DataArray(np.arange(5)) + scene["blackberry"] = xr.DataArray(np.arange(5)) + scene["strawberry"] = xr.DataArray(np.arange(5)) + 
scene["raspberry"] = xr.DataArray(np.arange(5)) # deepcode ignore replace~keys~list~compare: This is on purpose - assert make_cid(name='blueberry') in scene.keys() - assert make_cid(name='blueberry') in scene - assert 'blueberry' in scene - assert 'blueberry' not in scene.keys() + assert make_cid(name="blueberry") in scene.keys() + assert make_cid(name="blueberry") in scene + assert "blueberry" in scene + assert "blueberry" not in scene.keys() def test_delitem(self): """Test deleting an item.""" scene = Scene() scene["1"] = xr.DataArray(np.arange(5), - attrs={'wavelength': (0.1, 0.2, 0.3), - '_satpy_id_keys': default_id_keys_config}) + attrs={"wavelength": (0.1, 0.2, 0.3), + "_satpy_id_keys": default_id_keys_config}) scene["2"] = xr.DataArray(np.arange(5), - attrs={'wavelength': (0.4, 0.5, 0.6), - '_satpy_id_keys': default_id_keys_config}) + attrs={"wavelength": (0.4, 0.5, 0.6), + "_satpy_id_keys": default_id_keys_config}) scene["3"] = xr.DataArray(np.arange(5), - attrs={'wavelength': (0.7, 0.8, 0.9), - '_satpy_id_keys': default_id_keys_config}) - del scene['1'] - del scene['3'] + attrs={"wavelength": (0.7, 0.8, 0.9), + "_satpy_id_keys": default_id_keys_config}) + del scene["1"] + del scene["3"] del scene[0.45] assert not scene._wishlist assert not list(scene._datasets.keys()) @@ -268,7 +269,7 @@ def _create_coarest_finest_data_array(shape, area_def, attrs=None): data_arr = xr.DataArray( da.arange(math.prod(shape)).reshape(shape), attrs={ - 'area': area_def, + "area": area_def, }) if attrs: data_arr.attrs.update(attrs) @@ -277,11 +278,11 @@ def _create_coarest_finest_data_array(shape, area_def, attrs=None): def _create_coarsest_finest_area_def(shape, extents): from pyresample import AreaDefinition - proj_str = '+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs' + proj_str = "+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs" area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_str, shape[1], shape[0], @@ -331,7 +332,7 @@ def test_coarsest_finest_area_different_shape(self, coarse_area, fine_area): assert scn.coarsest_area() is coarse_area assert scn.finest_area() is fine_area - assert scn.coarsest_area(['2', '3']) is fine_area + assert scn.coarsest_area(["2", "3"]) is fine_area @pytest.mark.parametrize( ("area_def", "shifted_area"), @@ -375,24 +376,24 @@ class TestComputePersist: def test_compute_pass_through(self): """Test pass through of xarray compute.""" import numpy as np - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) scene = scene.compute() - assert isinstance(scene['ds1'].data, np.ndarray) + assert isinstance(scene["ds1"].data, np.ndarray) def test_persist_pass_through(self): """Test pass through of xarray persist.""" from dask.array.utils import assert_eq - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) scenep = scene.persist() - assert_eq(scene['ds1'].data, scenep['ds1'].data) - assert set(scenep['ds1'].data.dask).issubset(scene['ds1'].data.dask) - assert len(scenep["ds1"].data.dask) == scenep['ds1'].data.npartitions + assert_eq(scene["ds1"].data, scenep["ds1"].data) + assert set(scenep["ds1"].data.dask).issubset(scene["ds1"].data.dask) + assert len(scenep["ds1"].data.dask) == scenep["ds1"].data.npartitions def test_chunk_pass_through(self): """Test pass through of xarray chunk.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) scene = scene.chunk(chunks=2) - assert scene['ds1'].data.chunksize == (2, 2) + assert scene["ds1"].data.chunksize == (2, 2) diff --git a/satpy/tests/scene_tests/test_init.py b/satpy/tests/scene_tests/test_init.py index 4caf804366..b745fad9d0 100644 --- a/satpy/tests/scene_tests/test_init.py +++ b/satpy/tests/scene_tests/test_init.py @@ -38,49 +38,51 @@ class TestScene: def test_init(self): """Test scene initialization.""" - with mock.patch('satpy.scene.Scene._create_reader_instances') as cri: + with mock.patch("satpy.scene.Scene._create_reader_instances") as cri: cri.return_value = {} - Scene(filenames=['bla'], reader='blo') - cri.assert_called_once_with(filenames=['bla'], reader='blo', + Scene(filenames=["bla"], reader="blo") + cri.assert_called_once_with(filenames=["bla"], reader="blo", reader_kwargs=None) def test_init_str_filename(self): """Test initializing with a single string as filenames.""" - pytest.raises(ValueError, Scene, reader='blo', filenames='test.nc') + with pytest.raises(ValueError, match="'filenames' must be a list of files: .*"): + Scene(reader="blo", filenames="test.nc") def test_start_end_times(self): """Test start and end times for a scene.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") assert scene.start_time == FAKE_FILEHANDLER_START assert scene.end_time == FAKE_FILEHANDLER_END def test_init_preserve_reader_kwargs(self): """Test that the initialization preserves the kwargs.""" cri = spy_decorator(Scene._create_reader_instances) - with mock.patch('satpy.scene.Scene._create_reader_instances', cri): - reader_kwargs = {'calibration_type': 'gsics'} - scene = 
Scene(filenames=['fake1_1.txt'], - reader='fake1', - filter_parameters={'area': 'euron1'}, + with mock.patch("satpy.scene.Scene._create_reader_instances", cri): + reader_kwargs = {"calibration_type": "gsics"} + scene = Scene(filenames=["fake1_1.txt"], + reader="fake1", + filter_parameters={"area": "euron1"}, reader_kwargs=reader_kwargs) - assert reader_kwargs is not cri.mock.call_args[1]['reader_kwargs'] + assert reader_kwargs is not cri.mock.call_args[1]["reader_kwargs"] assert scene.start_time == FAKE_FILEHANDLER_START assert scene.end_time == FAKE_FILEHANDLER_END def test_init_alone(self): """Test simple initialization.""" scn = Scene() - assert not scn._readers, 'Empty scene should not load any readers' + assert not scn._readers, "Empty scene should not load any readers" def test_init_no_files(self): """Test that providing an empty list of filenames fails.""" - pytest.raises(ValueError, Scene, reader='viirs_sdr', filenames=[]) + with pytest.raises(ValueError, match="'filenames' was provided but is empty."): + Scene(reader="viirs_sdr", filenames=[]) def test_create_reader_instances_with_filenames(self): """Test creating a reader providing filenames.""" filenames = ["bla", "foo", "bar"] reader_name = None - with mock.patch('satpy.scene.load_readers') as findermock: + with mock.patch("satpy.scene.load_readers") as findermock: Scene(filenames=filenames) findermock.assert_called_once_with( filenames=filenames, @@ -112,7 +114,7 @@ def test_create_reader_instances_with_reader(self): """Test createring a reader instance providing the reader name.""" reader = "foo" filenames = ["1", "2", "3"] - with mock.patch('satpy.scene.load_readers') as findermock: + with mock.patch("satpy.scene.load_readers") as findermock: findermock.return_value = {} Scene(reader=reader, filenames=filenames) findermock.assert_called_once_with(reader=reader, @@ -123,29 +125,29 @@ def test_create_reader_instances_with_reader(self): def test_create_reader_instances_with_reader_kwargs(self): """Test creating a reader instance with reader kwargs.""" from satpy.readers.yaml_reader import FileYAMLReader - reader_kwargs = {'calibration_type': 'gsics'} - filter_parameters = {'area': 'euron1'} - reader_kwargs2 = {'calibration_type': 'gsics', 'filter_parameters': filter_parameters} + reader_kwargs = {"calibration_type": "gsics"} + filter_parameters = {"area": "euron1"} + reader_kwargs2 = {"calibration_type": "gsics", "filter_parameters": filter_parameters} rinit = spy_decorator(FileYAMLReader.create_filehandlers) - with mock.patch('satpy.readers.yaml_reader.FileYAMLReader.create_filehandlers', rinit): - scene = Scene(filenames=['fake1_1.txt'], - reader='fake1', - filter_parameters={'area': 'euron1'}, + with mock.patch("satpy.readers.yaml_reader.FileYAMLReader.create_filehandlers", rinit): + scene = Scene(filenames=["fake1_1.txt"], + reader="fake1", + filter_parameters={"area": "euron1"}, reader_kwargs=reader_kwargs) del scene - assert reader_kwargs == rinit.mock.call_args[1]['fh_kwargs'] + assert reader_kwargs == rinit.mock.call_args[1]["fh_kwargs"] rinit.mock.reset_mock() - scene = Scene(filenames=['fake1_1.txt'], - reader='fake1', + scene = Scene(filenames=["fake1_1.txt"], + reader="fake1", reader_kwargs=reader_kwargs2) - assert reader_kwargs == rinit.mock.call_args[1]['fh_kwargs'] + assert reader_kwargs == rinit.mock.call_args[1]["fh_kwargs"] del scene def test_create_multiple_reader_different_kwargs(self, include_test_etc): """Test passing different kwargs to different readers.""" from satpy.readers import load_reader - with 
mock.patch.object(satpy.readers, 'load_reader', wraps=load_reader) as lr: + with mock.patch.object(satpy.readers, "load_reader", wraps=load_reader) as lr: Scene(filenames={"fake1_1ds": ["fake1_1ds_1.txt"], "fake2_1ds": ["fake2_1ds_1.txt"]}, reader_kwargs={ @@ -153,8 +155,8 @@ def test_create_multiple_reader_different_kwargs(self, include_test_etc): "fake2_1ds": {"mouth": "varallo"} }) lr.assert_has_calls([ - mock.call([os.path.join(include_test_etc, 'readers', 'fake1_1ds.yaml')], mouth="omegna"), - mock.call([os.path.join(include_test_etc, 'readers', 'fake2_1ds.yaml')], mouth="varallo")]) + mock.call([os.path.join(include_test_etc, "readers", "fake1_1ds.yaml")], mouth="omegna"), + mock.call([os.path.join(include_test_etc, "readers", "fake2_1ds.yaml")], mouth="varallo")]) def test_storage_options_from_reader_kwargs_no_options(self): """Test getting storage options from reader kwargs. @@ -162,8 +164,8 @@ def test_storage_options_from_reader_kwargs_no_options(self): Case where there are no options given. """ filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"] - with mock.patch('satpy.scene.load_readers'): - with mock.patch('fsspec.open_files') as open_files: + with mock.patch("satpy.scene.load_readers"): + with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames) open_files.assert_called_once_with(filenames) @@ -173,13 +175,13 @@ def test_storage_options_from_reader_kwargs_single_dict_no_options(self): Case where a single dict is given for all readers without storage options. """ filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"] - reader_kwargs = {'reader_opt': 'foo'} - with mock.patch('satpy.scene.load_readers'): - with mock.patch('fsspec.open_files') as open_files: + reader_kwargs = {"reader_opt": "foo"} + with mock.patch("satpy.scene.load_readers"): + with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames, reader_kwargs=reader_kwargs) open_files.assert_called_once_with(filenames) - @pytest.mark.parametrize("reader_kwargs", [{}, {'reader_opt': 'foo'}]) + @pytest.mark.parametrize("reader_kwargs", [{}, {"reader_opt": "foo"}]) def test_storage_options_from_reader_kwargs_single_dict(self, reader_kwargs): """Test getting storage options from reader kwargs. 
@@ -187,14 +189,14 @@ def test_storage_options_from_reader_kwargs_single_dict(self, reader_kwargs): """ filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"] expected_reader_kwargs = reader_kwargs.copy() - storage_options = {'option1': '1'} - reader_kwargs['storage_options'] = storage_options + storage_options = {"option1": "1"} + reader_kwargs["storage_options"] = storage_options orig_reader_kwargs = deepcopy(reader_kwargs) - with mock.patch('satpy.scene.load_readers') as load_readers: - with mock.patch('fsspec.open_files') as open_files: + with mock.patch("satpy.scene.load_readers") as load_readers: + with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames, reader_kwargs=reader_kwargs) call_ = load_readers.mock_calls[0] - assert call_.kwargs['reader_kwargs'] == expected_reader_kwargs + assert call_.kwargs["reader_kwargs"] == expected_reader_kwargs open_files.assert_called_once_with(filenames, **storage_options) assert reader_kwargs == orig_reader_kwargs @@ -208,25 +210,25 @@ def test_storage_options_from_reader_kwargs_per_reader(self): "reader2": ["s3://data-bucket/file2"], "reader3": ["s3://data-bucket/file3"], } - storage_options_1 = {'option1': '1'} - storage_options_2 = {'option2': '2'} - storage_options_3 = {'option3': '3'} + storage_options_1 = {"option1": "1"} + storage_options_2 = {"option2": "2"} + storage_options_3 = {"option3": "3"} reader_kwargs = { - "reader1": {'reader_opt_1': 'foo'}, - "reader2": {'reader_opt_2': 'bar'}, - "reader3": {'reader_opt_3': 'baz'}, + "reader1": {"reader_opt_1": "foo"}, + "reader2": {"reader_opt_2": "bar"}, + "reader3": {"reader_opt_3": "baz"}, } expected_reader_kwargs = deepcopy(reader_kwargs) - reader_kwargs['reader1']['storage_options'] = storage_options_1 - reader_kwargs['reader2']['storage_options'] = storage_options_2 - reader_kwargs['reader3']['storage_options'] = storage_options_3 + reader_kwargs["reader1"]["storage_options"] = storage_options_1 + reader_kwargs["reader2"]["storage_options"] = storage_options_2 + reader_kwargs["reader3"]["storage_options"] = storage_options_3 orig_reader_kwargs = deepcopy(reader_kwargs) - with mock.patch('satpy.scene.load_readers') as load_readers: - with mock.patch('fsspec.open_files') as open_files: + with mock.patch("satpy.scene.load_readers") as load_readers: + with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames, reader_kwargs=reader_kwargs) call_ = load_readers.mock_calls[0] - assert call_.kwargs['reader_kwargs'] == expected_reader_kwargs + assert call_.kwargs["reader_kwargs"] == expected_reader_kwargs assert mock.call(filenames["reader1"], **storage_options_1) in open_files.mock_calls assert mock.call(filenames["reader2"], **storage_options_2) in open_files.mock_calls assert mock.call(filenames["reader3"], **storage_options_3) in open_files.mock_calls @@ -244,15 +246,15 @@ def test_storage_options_from_reader_kwargs_per_reader_and_global(self): "reader3": ["s3://data-bucket/file3"], } reader_kwargs = { - "reader1": {'reader_opt_1': 'foo', 'storage_options': {'option1': '1'}}, - "reader2": {'reader_opt_2': 'bar', 'storage_options': {'option2': '2'}}, + "reader1": {"reader_opt_1": "foo", "storage_options": {"option1": "1"}}, + "reader2": {"reader_opt_2": "bar", "storage_options": {"option2": "2"}}, "storage_options": {"endpoint_url": "url"}, } orig_reader_kwargs = deepcopy(reader_kwargs) - with mock.patch('satpy.scene.load_readers'): - with mock.patch('fsspec.open_files') as open_files: + with 
mock.patch("satpy.scene.load_readers"): + with mock.patch("fsspec.open_files") as open_files: Scene(filenames=filenames, reader_kwargs=reader_kwargs) - assert mock.call(filenames["reader1"], option1='1', endpoint_url='url') in open_files.mock_calls - assert mock.call(filenames["reader2"], option2='2', endpoint_url='url') in open_files.mock_calls + assert mock.call(filenames["reader1"], option1="1", endpoint_url="url") in open_files.mock_calls + assert mock.call(filenames["reader2"], option2="2", endpoint_url="url") in open_files.mock_calls assert reader_kwargs == orig_reader_kwargs diff --git a/satpy/tests/scene_tests/test_load.py b/satpy/tests/scene_tests/test_load.py index 6eefbc0080..889d9e2cbe 100644 --- a/satpy/tests/scene_tests/test_load.py +++ b/satpy/tests/scene_tests/test_load.py @@ -36,7 +36,7 @@ class TestSceneAllAvailableDatasets: def test_all_datasets_no_readers(self): """Test all datasets with no reader.""" scene = Scene() - pytest.raises(KeyError, scene.all_dataset_ids, reader_name='fake') + pytest.raises(KeyError, scene.all_dataset_ids, reader_name="fake") id_list = scene.all_dataset_ids() assert id_list == [] # no sensors are loaded so we shouldn't get any comps either @@ -46,7 +46,7 @@ def test_all_datasets_no_readers(self): def test_all_dataset_names_no_readers(self): """Test all dataset names with no reader.""" scene = Scene() - pytest.raises(KeyError, scene.all_dataset_names, reader_name='fake') + pytest.raises(KeyError, scene.all_dataset_names, reader_name="fake") name_list = scene.all_dataset_names() assert name_list == [] # no sensors are loaded so we shouldn't get any comps either @@ -57,7 +57,7 @@ def test_available_dataset_no_readers(self): """Test the available datasets without a reader.""" scene = Scene() pytest.raises( - KeyError, scene.available_dataset_ids, reader_name='fake') + KeyError, scene.available_dataset_ids, reader_name="fake") name_list = scene.available_dataset_ids() assert name_list == [] # no sensors are loaded so we shouldn't get any comps either @@ -68,7 +68,7 @@ def test_available_dataset_names_no_readers(self): """Test the available dataset names without a reader.""" scene = Scene() pytest.raises( - KeyError, scene.available_dataset_names, reader_name='fake') + KeyError, scene.available_dataset_names, reader_name="fake") name_list = scene.available_dataset_names() assert name_list == [] # no sensors are loaded so we shouldn't get any comps either @@ -77,8 +77,8 @@ def test_available_dataset_names_no_readers(self): def test_all_datasets_one_reader(self): """Test all datasets for one reader.""" - scene = Scene(filenames=['fake1_1.txt'], - reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], + reader="fake1") id_list = scene.all_dataset_ids() # 20 data products + 6 lon/lat products num_reader_ds = 21 + 6 @@ -88,8 +88,8 @@ def test_all_datasets_one_reader(self): def test_all_datasets_multiple_reader(self): """Test all datasets for multiple readers.""" - scene = Scene(filenames={'fake1_1ds': ['fake1_1ds_1.txt'], - 'fake2_1ds': ['fake2_1ds_1.txt']}) + scene = Scene(filenames={"fake1_1ds": ["fake1_1ds_1.txt"], + "fake2_1ds": ["fake2_1ds_1.txt"]}) id_list = scene.all_dataset_ids() assert len(id_list) == 2 id_list = scene.all_dataset_ids(composites=True) @@ -99,8 +99,8 @@ def test_all_datasets_multiple_reader(self): def test_available_datasets_one_reader(self): """Test the available datasets for one reader.""" - scene = Scene(filenames=['fake1_1ds_1.txt'], - reader='fake1_1ds') + scene = Scene(filenames=["fake1_1ds_1.txt"], + 
reader="fake1_1ds") id_list = scene.available_dataset_ids() assert len(id_list) == 1 id_list = scene.available_dataset_ids(composites=True) @@ -109,13 +109,13 @@ def test_available_datasets_one_reader(self): def test_available_composite_ids_missing_available(self): """Test available_composite_ids when a composites dep is missing.""" - scene = Scene(filenames=['fake1_1ds_1.txt'], - reader='fake1_1ds') - assert 'comp2' not in scene.available_composite_names() + scene = Scene(filenames=["fake1_1ds_1.txt"], + reader="fake1_1ds") + assert "comp2" not in scene.available_composite_names() def test_available_composites_known_versus_all(self): """Test available_composite_ids when some datasets aren't available.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1', + scene = Scene(filenames=["fake1_1.txt"], reader="fake1", reader_kwargs={"not_available": ["ds2", "ds3"]}) all_comps = scene.all_composite_names() avail_comps = scene.available_composite_names() @@ -127,11 +127,11 @@ def test_available_composites_known_versus_all(self): def test_available_comps_no_deps(self): """Test Scene available composites when composites don't have a dependency.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") all_comp_ids = scene.available_composite_ids() - assert make_cid(name='static_image') in all_comp_ids + assert make_cid(name="static_image") in all_comp_ids available_comp_ids = scene.available_composite_ids() - assert make_cid(name='static_image') in available_comp_ids + assert make_cid(name="static_image") in available_comp_ids def test_available_when_sensor_none_in_preloaded_dataarrays(self): """Test Scene available composites when existing loaded arrays have sensor set to None. @@ -143,7 +143,7 @@ def test_available_when_sensor_none_in_preloaded_dataarrays(self): """ scene = _scene_with_data_array_none_sensor() available_comp_ids = scene.available_composite_ids() - assert make_cid(name='static_image') in available_comp_ids + assert make_cid(name="static_image") in available_comp_ids @pytest.mark.usefixtures("include_test_etc") @@ -152,13 +152,13 @@ class TestBadLoading: def test_load_str(self): """Test passing a string to Scene.load.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - pytest.raises(TypeError, scene.load, 'ds1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + pytest.raises(TypeError, scene.load, "ds1") def test_load_no_exist(self): """Test loading a dataset that doesn't exist.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - pytest.raises(KeyError, scene.load, ['im_a_dataset_that_doesnt_exist']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + pytest.raises(KeyError, scene.load, ["im_a_dataset_that_doesnt_exist"]) @pytest.mark.usefixtures("include_test_etc") @@ -169,50 +169,50 @@ def test_load_no_exist2(self): """Test loading a dataset that doesn't exist then another load.""" from satpy.readers.yaml_reader import FileYAMLReader load_mock = spy_decorator(FileYAMLReader.load) - with mock.patch.object(FileYAMLReader, 'load', load_mock): + with mock.patch.object(FileYAMLReader, "load", load_mock): lmock = load_mock.mock - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds9_fail_load']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds9_fail_load"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 0 lmock.assert_called_once_with( - {make_dataid(name='ds9_fail_load', wavelength=(1.0, 1.1, 1.2))}) + 
{make_dataid(name="ds9_fail_load", wavelength=(1.0, 1.1, 1.2))}) - scene.load(['ds1']) + scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert lmock.call_count == 2 # most recent call should have only been ds1 lmock.assert_called_with({ - make_dataid(name='ds1', resolution=250, calibration='reflectance', modifiers=tuple()), + make_dataid(name="ds1", resolution=250, calibration="reflectance", modifiers=tuple()), }) assert len(loaded_ids) == 1 def test_load_ds1_no_comps(self): """Test loading one dataset with no loaded compositors.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0] == make_dataid(name='ds1', resolution=250, calibration='reflectance', modifiers=tuple()) + assert loaded_ids[0] == make_dataid(name="ds1", resolution=250, calibration="reflectance", modifiers=tuple()) def test_load_ds1_load_twice(self): """Test loading one dataset with no loaded compositors.""" from satpy.readers.yaml_reader import FileYAMLReader - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0] == make_dataid(name='ds1', resolution=250, calibration='reflectance', modifiers=tuple()) + assert loaded_ids[0] == make_dataid(name="ds1", resolution=250, calibration="reflectance", modifiers=tuple()) load_mock = spy_decorator(FileYAMLReader.load) - with mock.patch.object(FileYAMLReader, 'load', load_mock): + with mock.patch.object(FileYAMLReader, "load", load_mock): lmock = load_mock.mock - scene.load(['ds1']) + scene.load(["ds1"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0] == make_dataid(name='ds1', + assert loaded_ids[0] == make_dataid(name="ds1", resolution=250, - calibration='reflectance', + calibration="reflectance", modifiers=tuple()) assert not lmock.called, ("Reader.load was called again when " "loading something that's already " @@ -220,17 +220,17 @@ def test_load_ds1_load_twice(self): def test_load_ds1_unknown_modifier(self): """Test loading one dataset with no loaded compositors.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") pytest.raises(KeyError, scene.load, - [make_dataid(name='ds1', modifiers=('_fake_bad_mod_',))]) + [make_dataid(name="ds1", modifiers=("_fake_bad_mod_",))]) def test_load_ds4_cal(self): """Test loading a dataset that has two calibration variations.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds4']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds4"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0]['calibration'] == 'reflectance' + assert loaded_ids[0]["calibration"] == "reflectance" @pytest.mark.parametrize( ("input_filenames", "load_kwargs", "exp_resolution"), @@ -243,37 +243,37 @@ def test_load_ds4_cal(self): ) def test_load_ds5_variations(self, input_filenames, load_kwargs, exp_resolution): """Test loading a dataset has multiple resolutions available.""" - scene = Scene(filenames=input_filenames, reader='fake1') - scene.load(['ds5'], **load_kwargs) + scene = Scene(filenames=input_filenames, reader="fake1") + scene.load(["ds5"], **load_kwargs) 
loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0]['name'] == 'ds5' - assert loaded_ids[0]['resolution'] == exp_resolution + assert loaded_ids[0]["name"] == "ds5" + assert loaded_ids[0]["resolution"] == exp_resolution def test_load_ds5_multiple_resolution_loads(self): """Test loading a dataset with multiple resolutions available as separate loads.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds5'], resolution=1000) - scene.load(['ds5'], resolution=500) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds5"], resolution=1000) + scene.load(["ds5"], resolution=500) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 - assert loaded_ids[0]['name'] == 'ds5' - assert loaded_ids[0]['resolution'] == 500 - assert loaded_ids[1]['name'] == 'ds5' - assert loaded_ids[1]['resolution'] == 1000 + assert loaded_ids[0]["name"] == "ds5" + assert loaded_ids[0]["resolution"] == 500 + assert loaded_ids[1]["name"] == "ds5" + assert loaded_ids[1]["resolution"] == 1000 def test_load_ds6_wl(self): """Test loading a dataset by wavelength.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load([0.22]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0]['name'] == 'ds6' + assert loaded_ids[0]["name"] == "ds6" def test_load_ds9_fail_load(self): """Test loading a dataset that will fail during load.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds9_fail_load']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds9_fail_load"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 0 @@ -308,7 +308,7 @@ class TestLoadingComposites: ) def test_single_composite_loading(self, comp_name, exp_id_or_name): """Test that certain composites can be loaded individually.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load([comp_name]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 @@ -319,33 +319,33 @@ def test_single_composite_loading(self, comp_name, exp_id_or_name): def test_load_multiple_resolutions(self): """Test loading a dataset has multiple resolutions available with different resolutions.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - comp25 = make_cid(name='comp25', resolution=1000) - scene[comp25] = xr.DataArray([], attrs={'name': 'comp25', 'resolution': 1000}) - scene.load(['comp25'], resolution=500) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + comp25 = make_cid(name="comp25", resolution=1000) + scene[comp25] = xr.DataArray([], attrs={"name": "comp25", "resolution": 1000}) + scene.load(["comp25"], resolution=500) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 - assert loaded_ids[0]['name'] == 'comp25' - assert loaded_ids[0]['resolution'] == 500 - assert loaded_ids[1]['name'] == 'comp25' - assert loaded_ids[1]['resolution'] == 1000 + assert loaded_ids[0]["name"] == "comp25" + assert loaded_ids[0]["resolution"] == 500 + assert loaded_ids[1]["name"] == "comp25" + assert loaded_ids[1]["resolution"] == 1000 def test_load_same_subcomposite(self): """Test loading a composite and one of it's subcomposites at the same time.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp24', 'comp25'], resolution=500) + scene = Scene(filenames=["fake1_1.txt"], 
reader="fake1") + scene.load(["comp24", "comp25"], resolution=500) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 - assert loaded_ids[0]['name'] == 'comp24' - assert loaded_ids[0]['resolution'] == 500 - assert loaded_ids[1]['name'] == 'comp25' - assert loaded_ids[1]['resolution'] == 500 + assert loaded_ids[0]["name"] == "comp24" + assert loaded_ids[0]["resolution"] == 500 + assert loaded_ids[1]["name"] == "comp25" + assert loaded_ids[1]["resolution"] == 500 def test_load_comp8(self): """Test loading a composite that has a non-existent prereq.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - pytest.raises(KeyError, scene.load, ['comp8']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + pytest.raises(KeyError, scene.load, ["comp8"]) def test_load_comp15(self): """Test loading a composite whose prerequisites can't be loaded. @@ -354,23 +354,23 @@ def test_load_comp15(self): """ # it is fine that an optional prereq doesn't exist - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp15']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp15"]) loaded_ids = list(scene._datasets.keys()) assert not loaded_ids def test_load_comp17(self): """Test loading a composite that depends on a composite that won't load.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp17']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp17"]) loaded_ids = list(scene._datasets.keys()) assert not loaded_ids def test_load_comp18(self): """Test loading a composite that depends on an incompatible area modified dataset.""" # it is fine that an optional prereq doesn't exist - scene = Scene(filenames=['fake1_1.txt', 'fake1_highres_1.txt'], reader='fake1') - scene.load(['comp18']) + scene = Scene(filenames=["fake1_1.txt", "fake1_highres_1.txt"], reader="fake1") + scene.load(["comp18"]) loaded_ids = list(scene._datasets.keys()) # depends on: # ds3 @@ -379,11 +379,11 @@ def test_load_comp18(self): # We should end up with ds3, ds4 (mod1, mod3), ds5 (mod1), and ds1 # for the incomp_areas modifier assert len(loaded_ids) == 4 # the 1 dependencies - assert 'ds3' in scene._datasets - assert make_dataid(name='ds4', calibration='reflectance', - modifiers=('mod1', 'mod3')) in scene._datasets - assert make_dataid(name='ds5', resolution=250, - modifiers=('mod1',)) in scene._datasets + assert "ds3" in scene._datasets + assert make_dataid(name="ds4", calibration="reflectance", + modifiers=("mod1", "mod3")) in scene._datasets + assert make_dataid(name="ds5", resolution=250, + modifiers=("mod1",)) in scene._datasets def test_load_comp18_2(self): """Test loading a composite that depends on an incompatible area modified dataset. 
@@ -393,8 +393,8 @@ def test_load_comp18_2(self): """ # it is fine that an optional prereq doesn't exist - scene = Scene(filenames=['fake1_1.txt', 'fake1_highres_1.txt'], reader='fake1') - scene.load(['comp18_2']) + scene = Scene(filenames=["fake1_1.txt", "fake1_highres_1.txt"], reader="fake1") + scene.load(["comp18_2"]) loaded_ids = list(scene._datasets.keys()) # depends on: # ds3 @@ -403,12 +403,12 @@ def test_load_comp18_2(self): # We should end up with ds3, ds4 (mod1, mod3), ds5 (mod1), and ds1 # and ds2 for the incomp_areas_opt modifier assert len(loaded_ids) == 5 # the 1 dependencies - assert 'ds3' in scene._datasets - assert 'ds2' in scene._datasets - assert make_dataid(name='ds4', calibration='reflectance', - modifiers=('mod1', 'mod3')) in scene._datasets - assert make_dataid(name='ds5', resolution=250, - modifiers=('mod1',)) in scene._datasets + assert "ds3" in scene._datasets + assert "ds2" in scene._datasets + assert make_dataid(name="ds4", calibration="reflectance", + modifiers=("mod1", "mod3")) in scene._datasets + assert make_dataid(name="ds5", resolution=250, + modifiers=("mod1",)) in scene._datasets def test_load_comp19(self): """Test loading a composite that shares a dep with a dependency. @@ -421,79 +421,79 @@ def test_load_comp19(self): """ # Check dependency tree nodes # initialize the dep tree without loading the data - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene._update_dependency_tree({'comp19'}, None) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene._update_dependency_tree({"comp19"}, None) - this_node = scene._dependency_tree['comp19'] - shared_dep_id = make_dataid(name='ds5', modifiers=('res_change',)) + this_node = scene._dependency_tree["comp19"] + shared_dep_id = make_dataid(name="ds5", modifiers=("res_change",)) shared_dep_expected_node = scene._dependency_tree[shared_dep_id] # get the node for the first dep in the prereqs list of the # comp13 node - shared_dep_node = scene._dependency_tree['comp13'].data[1][0] + shared_dep_node = scene._dependency_tree["comp13"].data[1][0] shared_dep_node2 = this_node.data[1][0] assert shared_dep_expected_node is shared_dep_node assert shared_dep_expected_node is shared_dep_node2 - scene.load(['comp19']) + scene.load(["comp19"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0] == make_cid(name='comp19') + assert loaded_ids[0] == make_cid(name="comp19") def test_load_multiple_comps(self): """Test loading multiple composites.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp1', 'comp2', 'comp3', 'comp4', 'comp5', 'comp6', - 'comp7', 'comp9', 'comp10']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp1", "comp2", "comp3", "comp4", "comp5", "comp6", + "comp7", "comp9", "comp10"]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 9 def test_load_multiple_comps_separate(self): """Test loading multiple composites, one at a time.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp10']) - scene.load(['comp9']) - scene.load(['comp7']) - scene.load(['comp6']) - scene.load(['comp5']) - scene.load(['comp4']) - scene.load(['comp3']) - scene.load(['comp2']) - scene.load(['comp1']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp10"]) + scene.load(["comp9"]) + scene.load(["comp7"]) + scene.load(["comp6"]) + scene.load(["comp5"]) + scene.load(["comp4"]) + scene.load(["comp3"]) + scene.load(["comp2"]) + scene.load(["comp1"]) 
loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 9 def test_load_modified(self): """Test loading a modified dataset.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load([make_dsq(name='ds1', modifiers=('mod1', 'mod2'))]) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load([make_dsq(name="ds1", modifiers=("mod1", "mod2"))]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0]['modifiers'] == ('mod1', 'mod2') + assert loaded_ids[0]["modifiers"] == ("mod1", "mod2") def test_load_modified_with_load_kwarg(self): """Test loading a modified dataset using the ``Scene.load`` keyword argument.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['ds1'], modifiers=('mod1', 'mod2')) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["ds1"], modifiers=("mod1", "mod2")) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - assert loaded_ids[0]['modifiers'] == ('mod1', 'mod2') + assert loaded_ids[0]["modifiers"] == ("mod1", "mod2") def test_load_multiple_modified(self): """Test loading multiple modified datasets.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load([ - make_dataid(name='ds1', modifiers=('mod1', 'mod2')), - make_dataid(name='ds2', modifiers=('mod2', 'mod1')), + make_dataid(name="ds1", modifiers=("mod1", "mod2")), + make_dataid(name="ds2", modifiers=("mod2", "mod1")), ]) loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 for i in loaded_ids: - if i['name'] == 'ds1': - assert i['modifiers'] == ('mod1', 'mod2') + if i["name"] == "ds1": + assert i["modifiers"] == ("mod1", "mod2") else: - assert i['name'] == 'ds2' - assert i['modifiers'] == ('mod2', 'mod1') + assert i["name"] == "ds2" + assert i["modifiers"] == ("mod2", "mod1") def test_load_dataset_after_composite(self): """Test load composite followed by other datasets.""" @@ -501,15 +501,15 @@ def test_load_dataset_after_composite(self): from satpy.tests.utils import FakeCompositor load_mock = spy_decorator(FileYAMLReader.load) comp_mock = spy_decorator(FakeCompositor.__call__) - with mock.patch.object(FileYAMLReader, 'load', load_mock), \ - mock.patch.object(FakeCompositor, '__call__', comp_mock): + with mock.patch.object(FileYAMLReader, "load", load_mock), \ + mock.patch.object(FakeCompositor, "__call__", comp_mock): lmock = load_mock.mock - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp3']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp3"]) assert lmock.call_count == 1 - scene.load(['ds1']) + scene.load(["ds1"]) assert lmock.call_count == 2 - scene.load(['ds1']) + scene.load(["ds1"]) # we should only load from the file twice assert lmock.call_count == 2 # we should only generate the composite once @@ -524,36 +524,36 @@ def test_load_dataset_after_composite2(self): load_mock = spy_decorator(FileYAMLReader.load) comp_mock = spy_decorator(FakeCompositor.__call__) mod_mock = spy_decorator(FakeModifier.__call__) - with mock.patch.object(FileYAMLReader, 'load', load_mock), \ - mock.patch.object(FakeCompositor, '__call__', comp_mock), \ - mock.patch.object(FakeModifier, '__call__', mod_mock): + with mock.patch.object(FileYAMLReader, "load", load_mock), \ + mock.patch.object(FakeCompositor, "__call__", comp_mock), \ + mock.patch.object(FakeModifier, "__call__", mod_mock): lmock = load_mock.mock - scene = 
Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp10']) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp10"]) assert lmock.call_count == 1 loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 1 - with mock.patch.object(scene, '_generate_composites_nodes_from_loaded_datasets', + with mock.patch.object(scene, "_generate_composites_nodes_from_loaded_datasets", wraps=scene._generate_composites_nodes_from_loaded_datasets) as m: - scene.load(['ds1']) + scene.load(["ds1"]) assert lmock.call_count == 2 loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 # this is the unmodified ds1 assert make_dataid( - name='ds1', resolution=250, calibration='reflectance', modifiers=tuple() + name="ds1", resolution=250, calibration="reflectance", modifiers=tuple() ) in loaded_ids # m.assert_called_once_with(set([scene._dependency_tree['ds1']])) m.assert_called_once_with(set()) - with mock.patch.object(scene, '_generate_composites_nodes_from_loaded_datasets', + with mock.patch.object(scene, "_generate_composites_nodes_from_loaded_datasets", wraps=scene._generate_composites_nodes_from_loaded_datasets) as m: - scene.load(['ds1']) + scene.load(["ds1"]) assert lmock.call_count == 2 loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 # this is the unmodified ds1 assert make_dataid( - name='ds1', resolution=250, calibration='reflectance', modifiers=tuple() + name="ds1", resolution=250, calibration="reflectance", modifiers=tuple() ) in loaded_ids m.assert_called_once_with(set()) # we should only generate the comp10 composite once but comp2 was also generated @@ -567,17 +567,17 @@ def test_load_dataset_after_composite2(self): def test_no_generate_comp10(self): """Test generating a composite after loading.""" # it is fine that an optional prereq doesn't exist - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp10'], generate=False) - assert any(ds_id['name'] == 'comp10' for ds_id in scene._wishlist) - assert 'comp10' not in scene._datasets + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp10"], generate=False) + assert any(ds_id["name"] == "comp10" for ds_id in scene._wishlist) + assert "comp10" not in scene._datasets # two dependencies should have been loaded assert len(scene._datasets) == 2 assert len(scene.missing_datasets) == 1 scene._generate_composites_from_loaded_datasets() - assert any(ds_id['name'] == 'comp10' for ds_id in scene._wishlist) - assert 'comp10' in scene._datasets + assert any(ds_id["name"] == "comp10" for ds_id in scene._wishlist) + assert "comp10" in scene._datasets assert not scene.missing_datasets def test_modified_with_wl_dep(self): @@ -592,10 +592,10 @@ def test_modified_with_wl_dep(self): # Check dependency tree nodes # initialize the dep tree without loading the data - ds1_mod_id = make_dsq(name='ds1', modifiers=('mod_wl',)) - ds3_mod_id = make_dsq(name='ds3', modifiers=('mod_wl',)) + ds1_mod_id = make_dsq(name="ds1", modifiers=("mod_wl",)) + ds3_mod_id = make_dsq(name="ds3", modifiers=("mod_wl",)) - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene._update_dependency_tree({ds1_mod_id, ds3_mod_id}, None) ds1_mod_node = scene._dependency_tree[ds1_mod_id] @@ -603,10 +603,10 @@ def test_modified_with_wl_dep(self): ds1_mod_dep_node = ds1_mod_node.data[1][1] ds3_mod_dep_node = ds3_mod_node.data[1][1] # mod_wl depends on the this node: - ds6_modded_node = 
scene._dependency_tree[make_dataid(name='ds6', modifiers=('mod1',))] + ds6_modded_node = scene._dependency_tree[make_dataid(name="ds6", modifiers=("mod1",))] # this dep should be full qualified with name and wavelength - assert ds6_modded_node.name['name'] is not None - assert isinstance(ds6_modded_node.name['wavelength'], WavelengthRange) + assert ds6_modded_node.name["name"] is not None + assert isinstance(ds6_modded_node.name["wavelength"], WavelengthRange) # the node should be shared between everything that uses it assert ds1_mod_dep_node is ds3_mod_dep_node assert ds1_mod_dep_node is ds6_modded_node @@ -621,25 +621,25 @@ def test_modified_with_wl_dep(self): def test_load_comp11_and_23(self): """Test loading two composites that depend on similar wavelengths.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") # mock the available comps/mods in the compositor loader avail_comps = scene.available_composite_ids() - assert make_cid(name='comp11') in avail_comps - assert make_cid(name='comp23') in avail_comps + assert make_cid(name="comp11") in avail_comps + assert make_cid(name="comp23") in avail_comps # it is fine that an optional prereq doesn't exist - scene.load(['comp11', 'comp23']) - comp11_node = scene._dependency_tree['comp11'] - comp23_node = scene._dependency_tree['comp23'] - assert comp11_node.data[1][-1].name['name'] == 'ds10' - assert comp23_node.data[1][0].name['name'] == 'ds8' + scene.load(["comp11", "comp23"]) + comp11_node = scene._dependency_tree["comp11"] + comp23_node = scene._dependency_tree["comp23"] + assert comp11_node.data[1][-1].name["name"] == "ds10" + assert comp23_node.data[1][0].name["name"] == "ds8" loaded_ids = list(scene._datasets.keys()) assert len(loaded_ids) == 2 - assert 'comp11' in scene - assert 'comp23' in scene + assert "comp11" in scene + assert "comp23" in scene def test_load_too_many(self): """Test dependency tree if too many reader keys match.""" - scene = Scene(filenames=['fake3_1.txt'], reader='fake3') + scene = Scene(filenames=["fake3_1.txt"], reader="fake3") avail_comps = scene.available_composite_ids() # static image => 1 assert len(avail_comps) == 1 @@ -660,8 +660,8 @@ def test_load_when_sensor_none_in_preloaded_dataarrays(self): def _scene_with_data_array_none_sensor(): - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene['my_data'] = _data_array_none_sensor("my_data") + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene["my_data"] = _data_array_none_sensor("my_data") return scene diff --git a/satpy/tests/scene_tests/test_resampling.py b/satpy/tests/scene_tests/test_resampling.py index 39f9a50092..6b5f74ee59 100644 --- a/satpy/tests/scene_tests/test_resampling.py +++ b/satpy/tests/scene_tests/test_resampling.py @@ -39,40 +39,40 @@ def test_crop(self): scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) - proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, - 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} + proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m"} x_size = 3712 y_size = 3712 area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( - 'test2', - 'test2', - 'test2', + "test2", + "test2", + "test2", proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = xr.DataArray(np.zeros((y_size, x_size))) - scene1["2"] = 
xr.DataArray(np.zeros((y_size, x_size)), dims=('y', 'x')) - scene1["3"] = xr.DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'), - attrs={'area': area_def}) - scene1["4"] = xr.DataArray(np.zeros((y_size // 2, x_size // 2)), dims=('y', 'x'), - attrs={'area': area_def2}) + scene1["2"] = xr.DataArray(np.zeros((y_size, x_size)), dims=("y", "x")) + scene1["3"] = xr.DataArray(np.zeros((y_size, x_size)), dims=("y", "x"), + attrs={"area": area_def}) + scene1["4"] = xr.DataArray(np.zeros((y_size // 2, x_size // 2)), dims=("y", "x"), + attrs={"area": area_def2}) # by area crop_area = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, x_size, y_size, @@ -80,33 +80,33 @@ def test_crop(self): area_extent[2] - 10000., area_extent[3] - 500000.) ) new_scn1 = scene1.crop(crop_area) - assert '1' in new_scn1 - assert '2' in new_scn1 - assert '3' in new_scn1 - assert new_scn1['1'].shape == (y_size, x_size) - assert new_scn1['2'].shape == (y_size, x_size) - assert new_scn1['3'].shape == (3380, 3708) - assert new_scn1['4'].shape == (1690, 1854) + assert "1" in new_scn1 + assert "2" in new_scn1 + assert "3" in new_scn1 + assert new_scn1["1"].shape == (y_size, x_size) + assert new_scn1["2"].shape == (y_size, x_size) + assert new_scn1["3"].shape == (3380, 3708) + assert new_scn1["4"].shape == (1690, 1854) # by lon/lat bbox new_scn1 = scene1.crop(ll_bbox=(-20., -5., 0, 0)) - assert '1' in new_scn1 - assert '2' in new_scn1 - assert '3' in new_scn1 - assert new_scn1['1'].shape == (y_size, x_size) - assert new_scn1['2'].shape == (y_size, x_size) - assert new_scn1['3'].shape == (184, 714) - assert new_scn1['4'].shape == (92, 357) + assert "1" in new_scn1 + assert "2" in new_scn1 + assert "3" in new_scn1 + assert new_scn1["1"].shape == (y_size, x_size) + assert new_scn1["2"].shape == (y_size, x_size) + assert new_scn1["3"].shape == (184, 714) + assert new_scn1["4"].shape == (92, 357) # by x/y bbox new_scn1 = scene1.crop(xy_bbox=(-200000., -100000., 0, 0)) - assert '1' in new_scn1 - assert '2' in new_scn1 - assert '3' in new_scn1 - assert new_scn1['1'].shape == (y_size, x_size) - assert new_scn1['2'].shape == (y_size, x_size) - assert new_scn1['3'].shape == (36, 70) - assert new_scn1['4'].shape == (18, 35) + assert "1" in new_scn1 + assert "2" in new_scn1 + assert "3" in new_scn1 + assert new_scn1["1"].shape == (y_size, x_size) + assert new_scn1["2"].shape == (y_size, x_size) + assert new_scn1["3"].shape == (36, 70) + assert new_scn1["4"].shape == (18, 35) def test_crop_epsg_crs(self): """Test the crop method when source area uses an EPSG code.""" @@ -117,18 +117,18 @@ def test_crop_epsg_crs(self): x_size = 3712 y_size = 3712 area_def = AreaDefinition( - 'test', 'test', 'test', + "test", "test", "test", "EPSG:32630", x_size, y_size, area_extent, ) - scene1["1"] = xr.DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'), - attrs={'area': area_def}) + scene1["1"] = xr.DataArray(np.zeros((y_size, x_size)), dims=("y", "x"), + attrs={"area": area_def}) # by x/y bbox new_scn1 = scene1.crop(xy_bbox=(719695.7781587119, 5427887.407618969, 725068.1609052602, 5433708.364368956)) - assert '1' in new_scn1 - assert new_scn1['1'].shape == (198, 182) + assert "1" in new_scn1 + assert new_scn1["1"].shape == (198, 182) def test_crop_rgb(self): """Test the crop method on multi-dimensional data.""" @@ -136,43 +136,43 @@ def test_crop_rgb(self): scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) - proj_dict = {'a': 6378169.0, 'b': 6356583.8, 
'h': 35785831.0, - 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} + proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m"} x_size = 3712 y_size = 3712 area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, x_size, y_size, area_extent, ) area_def2 = AreaDefinition( - 'test2', - 'test2', - 'test2', + "test2", + "test2", + "test2", proj_dict, x_size // 2, y_size // 2, area_extent, ) scene1["1"] = xr.DataArray(np.zeros((3, y_size, x_size)), - dims=('bands', 'y', 'x'), - attrs={'area': area_def}) + dims=("bands", "y", "x"), + attrs={"area": area_def}) scene1["2"] = xr.DataArray(np.zeros((y_size // 2, 3, x_size // 2)), - dims=('y', 'bands', 'x'), - attrs={'area': area_def2}) + dims=("y", "bands", "x"), + attrs={"area": area_def2}) # by lon/lat bbox new_scn1 = scene1.crop(ll_bbox=(-20., -5., 0, 0)) - assert '1' in new_scn1 - assert '2' in new_scn1 - assert 'bands' in new_scn1['1'].dims - assert 'bands' in new_scn1['2'].dims - assert new_scn1['1'].shape == (3, 184, 714) - assert new_scn1['2'].shape == (92, 3, 357) + assert "1" in new_scn1 + assert "2" in new_scn1 + assert "bands" in new_scn1["1"].dims + assert "bands" in new_scn1["2"].dims + assert new_scn1["1"].shape == (3, 184, 714) + assert new_scn1["2"].shape == (92, 3, 357) @pytest.mark.usefixtures("include_test_etc") @@ -187,17 +187,17 @@ def _fake_resample_dataset_force_20x20(self, dataset, dest_area, **kwargs): """Return copy of dataset pretending it was resampled to (20, 20) shape.""" data = np.zeros((20, 20)) attrs = dataset.attrs.copy() - attrs['area'] = dest_area + attrs["area"] = dest_area return xr.DataArray( data, - dims=('y', 'x'), + dims=("y", "x"), attrs=attrs, ) - @mock.patch('satpy.scene.resample_dataset') - @pytest.mark.parametrize('datasets', [ + @mock.patch("satpy.scene.resample_dataset") + @pytest.mark.parametrize("datasets", [ None, - ('comp13', 'ds5', 'ds2'), + ("comp13", "ds5", "ds2"), ]) def test_resample_scene_copy(self, rs, datasets): """Test that the Scene is properly copied during resampling. @@ -209,26 +209,26 @@ def test_resample_scene_copy(self, rs, datasets): from pyresample.geometry import AreaDefinition rs.side_effect = self._fake_resample_dataset_force_20x20 - proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') - area_def = AreaDefinition('test', 'test', 'test', proj_str, 5, 5, (-1000., -1500., 1000., 1500.)) + proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs") + area_def = AreaDefinition("test", "test", "test", proj_str, 5, 5, (-1000., -1500., 1000., 1500.)) area_def.get_area_slices = mock.MagicMock() - scene = Scene(filenames=['fake1_1.txt', 'fake1_highres_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt", "fake1_highres_1.txt"], reader="fake1") - scene.load(['comp19']) + scene.load(["comp19"]) new_scene = scene.resample(area_def, datasets=datasets) - new_scene['new_ds'] = new_scene['comp19'].copy() + new_scene["new_ds"] = new_scene["comp19"].copy() - scene.load(['ds1']) + scene.load(["ds1"]) - comp19_node = scene._dependency_tree['comp19'] - ds5_mod_id = make_dataid(name='ds5', modifiers=('res_change',)) + comp19_node = scene._dependency_tree["comp19"] + ds5_mod_id = make_dataid(name="ds5", modifiers=("res_change",)) ds5_node = scene._dependency_tree[ds5_mod_id] - comp13_node = scene._dependency_tree['comp13'] + comp13_node = scene._dependency_tree["comp13"] assert comp13_node.data[1][0] is comp19_node.data[1][0] assert comp13_node.data[1][0] is ds5_node - pytest.raises(KeyError, scene._dependency_tree.__getitem__, 'new_ds') + pytest.raises(KeyError, scene._dependency_tree.__getitem__, "new_ds") # comp19 required resampling to produce so we should have its 3 deps # 1. comp13 @@ -238,15 +238,15 @@ def test_resample_scene_copy(self, rs, datasets): # 4. ds1 loaded_ids = list(scene.keys()) assert len(loaded_ids) == 4 - for name in ('comp13', 'ds5', 'ds2', 'ds1'): - assert any(x['name'] == name for x in loaded_ids) + for name in ("comp13", "ds5", "ds2", "ds1"): + assert any(x["name"] == name for x in loaded_ids) loaded_ids = list(new_scene.keys()) assert len(loaded_ids) == 2 - assert loaded_ids[0] == make_cid(name='comp19') - assert loaded_ids[1] == make_cid(name='new_ds') + assert loaded_ids[0] == make_cid(name="comp19") + assert loaded_ids[1] == make_cid(name="new_ds") - @mock.patch('satpy.scene.resample_dataset') + @mock.patch("satpy.scene.resample_dataset") def test_resample_scene_preserves_requested_dependencies(self, rs): """Test that the Scene is properly copied during resampling. @@ -258,61 +258,61 @@ def test_resample_scene_preserves_requested_dependencies(self, rs): from pyresample.utils import proj4_str_to_dict rs.side_effect = self._fake_resample_dataset - proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 ' - '+units=m +no_defs') - area_def = AreaDefinition('test', 'test', 'test', proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) + proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. 
+lat_0=25 +lat_1=25 " + "+units=m +no_defs") + area_def = AreaDefinition("test", "test", "test", proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) area_def.get_area_slices = mock.MagicMock() - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") # Set PYTHONHASHSEED to 0 in the interpreter to test as intended (comp26 comes before comp14) - scene.load(['comp26', 'comp14'], generate=False) + scene.load(["comp26", "comp14"], generate=False) scene.resample(area_def, unload=True) new_scene_2 = scene.resample(area_def, unload=True) - assert 'comp14' not in scene - assert 'comp26' not in scene - assert 'comp14' in new_scene_2 - assert 'comp26' in new_scene_2 - assert 'ds1' not in new_scene_2 # unloaded + assert "comp14" not in scene + assert "comp26" not in scene + assert "comp14" in new_scene_2 + assert "comp26" in new_scene_2 + assert "ds1" not in new_scene_2 # unloaded - @mock.patch('satpy.scene.resample_dataset') + @mock.patch("satpy.scene.resample_dataset") def test_resample_reduce_data_toggle(self, rs): """Test that the Scene can be reduced or not reduced during resampling.""" from pyresample.geometry import AreaDefinition rs.side_effect = self._fake_resample_dataset_force_20x20 - proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') - target_area = AreaDefinition('test', 'test', 'test', proj_str, 4, 4, (-1000., -1500., 1000., 1500.)) - area_def = AreaDefinition('test', 'test', 'test', proj_str, 5, 5, (-1000., -1500., 1000., 1500.)) + proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs") + target_area = AreaDefinition("test", "test", "test", proj_str, 4, 4, (-1000., -1500., 1000., 1500.)) + area_def = AreaDefinition("test", "test", "test", proj_str, 5, 5, (-1000., -1500., 1000., 1500.)) area_def.get_area_slices = mock.MagicMock() get_area_slices = area_def.get_area_slices get_area_slices.return_value = (slice(0, 3, None), slice(0, 3, None)) - area_def_big = AreaDefinition('test', 'test', 'test', proj_str, 10, 10, (-1000., -1500., 1000., 1500.)) + area_def_big = AreaDefinition("test", "test", "test", proj_str, 10, 10, (-1000., -1500., 1000., 1500.)) area_def_big.get_area_slices = mock.MagicMock() get_area_slices_big = area_def_big.get_area_slices get_area_slices_big.return_value = (slice(0, 6, None), slice(0, 6, None)) # Test that data reduction can be disabled - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp19']) - scene['comp19'].attrs['area'] = area_def - scene['comp19_big'] = xr.DataArray( - da.zeros((10, 10)), dims=('y', 'x'), - attrs=scene['comp19'].attrs.copy()) - scene['comp19_big'].attrs['area'] = area_def_big - scene['comp19_copy'] = scene['comp19'].copy() + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp19"]) + scene["comp19"].attrs["area"] = area_def + scene["comp19_big"] = xr.DataArray( + da.zeros((10, 10)), dims=("y", "x"), + attrs=scene["comp19"].attrs.copy()) + scene["comp19_big"].attrs["area"] = area_def_big + scene["comp19_copy"] = scene["comp19"].copy() orig_slice_data = scene._slice_data # we force the below order of processing to test that success isn't # based on data of the same resolution being processed together test_order = [ - make_cid(**scene['comp19'].attrs), - make_cid(**scene['comp19_big'].attrs), - make_cid(**scene['comp19_copy'].attrs), + make_cid(**scene["comp19"].attrs), + make_cid(**scene["comp19_big"].attrs), + 
make_cid(**scene["comp19_copy"].attrs), ] - with mock.patch('satpy.scene.Scene._slice_data') as slice_data, \ - mock.patch('satpy.dataset.dataset_walker') as ds_walker: + with mock.patch("satpy.scene.Scene._slice_data") as slice_data, \ + mock.patch("satpy.dataset.dataset_walker") as ds_walker: ds_walker.return_value = test_order slice_data.side_effect = orig_slice_data scene.resample(target_area, reduce_data=False) @@ -332,24 +332,24 @@ def test_resample_ancillary(self): """Test that the Scene reducing data does not affect final output.""" from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict - proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 ' - '+units=m +no_defs') - area_def = AreaDefinition('test', 'test', 'test', proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - - scene.load(['comp19', 'comp20']) - scene['comp19'].attrs['area'] = area_def - scene['comp19'].attrs['ancillary_variables'] = [scene['comp20']] - scene['comp20'].attrs['area'] = area_def - - dst_area = AreaDefinition('dst', 'dst', 'dst', + proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. +lat_0=25 +lat_1=25 " + "+units=m +no_defs") + area_def = AreaDefinition("test", "test", "test", proj_dict, 5, 5, (-1000., -1500., 1000., 1500.)) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + + scene.load(["comp19", "comp20"]) + scene["comp19"].attrs["area"] = area_def + scene["comp19"].attrs["ancillary_variables"] = [scene["comp20"]] + scene["comp20"].attrs["area"] = area_def + + dst_area = AreaDefinition("dst", "dst", "dst", proj_dict, 2, 2, (-1000., -1500., 0., 0.), ) new_scene = scene.resample(dst_area) - assert new_scene['comp20'] is new_scene['comp19'].attrs['ancillary_variables'][0] + assert new_scene["comp20"] is new_scene["comp19"].attrs["ancillary_variables"][0] def test_resample_multi_ancillary(self): """Test that multiple ancillary variables are retained after resampling. @@ -380,14 +380,14 @@ def test_resample_multi_ancillary(self): def test_resample_reduce_data(self): """Test that the Scene reducing data does not affect final output.""" from pyresample.geometry import AreaDefinition - proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') - area_def = AreaDefinition('test', 'test', 'test', proj_str, 20, 20, (-1000., -1500., 1000., 1500.)) - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - - scene.load(['comp19']) - scene['comp19'].attrs['area'] = area_def - dst_area = AreaDefinition('dst', 'dst', 'dst', + proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs") + area_def = AreaDefinition("test", "test", "test", proj_str, 20, 20, (-1000., -1500., 1000., 1500.)) + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + + scene.load(["comp19"]) + scene["comp19"].attrs["area"] = area_def + dst_area = AreaDefinition("dst", "dst", "dst", proj_str, 20, 20, (-1000., -1500., 0., 0.), @@ -395,24 +395,24 @@ def test_resample_reduce_data(self): new_scene1 = scene.resample(dst_area, reduce_data=False) new_scene2 = scene.resample(dst_area) new_scene3 = scene.resample(dst_area, reduce_data=True) - assert new_scene1['comp19'].shape == (20, 20, 3) - assert new_scene2['comp19'].shape == (20, 20, 3) - assert new_scene3['comp19'].shape == (20, 20, 3) + assert new_scene1["comp19"].shape == (20, 20, 3) + assert new_scene2["comp19"].shape == (20, 20, 3) + assert new_scene3["comp19"].shape == (20, 20, 3) - @mock.patch('satpy.scene.resample_dataset') + @mock.patch("satpy.scene.resample_dataset") def test_no_generate_comp10(self, rs): """Test generating a composite after loading.""" from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict rs.side_effect = self._fake_resample_dataset - proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 ' - '+units=m +no_defs') + proj_dict = proj4_str_to_dict("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. +lat_0=25 +lat_1=25 " + "+units=m +no_defs") area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, 200, 400, @@ -420,36 +420,36 @@ def test_no_generate_comp10(self, rs): ) # it is fine that an optional prereq doesn't exist - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') - scene.load(['comp10'], generate=False) - assert any(ds_id['name'] == 'comp10' for ds_id in scene._wishlist) - assert 'comp10' not in scene + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") + scene.load(["comp10"], generate=False) + assert any(ds_id["name"] == "comp10" for ds_id in scene._wishlist) + assert "comp10" not in scene # two dependencies should have been loaded assert len(scene._datasets) == 2 assert len(scene.missing_datasets) == 1 new_scn = scene.resample(area_def, generate=False) - assert 'comp10' not in scene + assert "comp10" not in scene # two dependencies should have been loaded assert len(scene._datasets) == 2 assert len(scene.missing_datasets) == 1 new_scn._generate_composites_from_loaded_datasets() - assert any(ds_id['name'] == 'comp10' for ds_id in new_scn._wishlist) - assert 'comp10' in new_scn + assert any(ds_id["name"] == "comp10" for ds_id in new_scn._wishlist) + assert "comp10" in new_scn assert not new_scn.missing_datasets # try generating them right away new_scn = scene.resample(area_def) - assert any(ds_id['name'] == 'comp10' for ds_id in new_scn._wishlist) - assert 'comp10' in new_scn + assert any(ds_id["name"] == "comp10" for ds_id in new_scn._wishlist) + assert "comp10" in new_scn assert not new_scn.missing_datasets def test_comp_loading_after_resampling_existing_sensor(self): """Test requesting a composite after resampling.""" - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") scene.load(["ds1", "ds2"]) - new_scn = scene.resample(resampler='native') + new_scn = scene.resample(resampler="native") # Can't load from readers after resampling with pytest.raises(KeyError): @@ -463,9 +463,9 @@ def test_comp_loading_after_resampling_existing_sensor(self): def 
test_comp_loading_after_resampling_new_sensor(self): """Test requesting a composite after resampling when the sensor composites weren't loaded before.""" # this is our base Scene with sensor "fake_sensor2" - scene1 = Scene(filenames=['fake2_3ds_1.txt'], reader='fake2_3ds') + scene1 = Scene(filenames=["fake2_3ds_1.txt"], reader="fake2_3ds") scene1.load(["ds2"]) - new_scn = scene1.resample(resampler='native') + new_scn = scene1.resample(resampler="native") # Can't load from readers after resampling with pytest.raises(KeyError): @@ -534,16 +534,16 @@ def test_comps_need_resampling_optional_mod_deps(self): dependencies that aren't needed which fail. """ - scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene = Scene(filenames=["fake1_1.txt"], reader="fake1") # should require resampling - scene.load(['comp27', 'ds13']) - assert 'comp27' not in scene - assert 'ds13' in scene + scene.load(["comp27", "ds13"]) + assert "comp27" not in scene + assert "ds13" in scene - new_scene = scene.resample(resampler='native') + new_scene = scene.resample(resampler="native") assert len(list(new_scene.keys())) == 2 - assert 'comp27' in new_scene - assert 'ds13' in new_scene + assert "comp27" in new_scene + assert "ds13" in new_scene class TestSceneAggregation: @@ -556,7 +556,7 @@ def test_aggregate(self): scene1 = self._create_test_data(x_size, y_size) - scene2 = scene1.aggregate(func='sum', x=2, y=2) + scene2 = scene1.aggregate(func="sum", x=2, y=2) expected_aggregated_shape = (y_size / 2, x_size / 2) self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size) @@ -577,40 +577,40 @@ def _create_test_data(x_size, y_size): scene1 = Scene() area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927, 5570248.477339745) - proj_dict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, - 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'} + proj_dict = {"a": 6378169.0, "b": 6356583.8, "h": 35785831.0, + "lon_0": 0.0, "proj": "geos", "units": "m"} area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", proj_dict, x_size, y_size, area_extent, ) scene1["1"] = xr.DataArray(np.ones((y_size, x_size)), - attrs={'_satpy_id_keys': default_id_keys_config}) + attrs={"_satpy_id_keys": default_id_keys_config}) scene1["2"] = xr.DataArray(np.ones((y_size, x_size)), - dims=('y', 'x'), - attrs={'_satpy_id_keys': default_id_keys_config}) + dims=("y", "x"), + attrs={"_satpy_id_keys": default_id_keys_config}) scene1["3"] = xr.DataArray(np.ones((y_size, x_size)), - dims=('y', 'x'), - attrs={'area': area_def, '_satpy_id_keys': default_id_keys_config}) + dims=("y", "x"), + attrs={"area": area_def, "_satpy_id_keys": default_id_keys_config}) scene1["4"] = xr.DataArray(np.ones((y_size, x_size)), - dims=('y', 'x'), - attrs={'area': area_def, 'standard_name': 'backscatter', - '_satpy_id_keys': default_id_keys_config}) + dims=("y", "x"), + attrs={"area": area_def, "standard_name": "backscatter", + "_satpy_id_keys": default_id_keys_config}) return scene1 def _check_aggregation_results(self, expected_aggregated_shape, scene1, scene2, x_size, y_size): - assert scene1['1'] is scene2['1'] - assert scene1['2'] is scene2['2'] - np.testing.assert_allclose(scene2['3'].data, 4) - assert scene2['1'].shape == (y_size, x_size) - assert scene2['2'].shape == (y_size, x_size) - assert scene2['3'].shape == expected_aggregated_shape - assert 'standard_name' in scene2['4'].attrs - assert scene2['4'].attrs['standard_name'] == 'backscatter' + assert scene1["1"] is scene2["1"] + assert 
scene1["2"] is scene2["2"] + np.testing.assert_allclose(scene2["3"].data, 4) + assert scene2["1"].shape == (y_size, x_size) + assert scene2["2"].shape == (y_size, x_size) + assert scene2["3"].shape == expected_aggregated_shape + assert "standard_name" in scene2["4"].attrs + assert scene2["4"].attrs["standard_name"] == "backscatter" def test_aggregate_with_boundary(self): """Test aggregation with boundary argument.""" @@ -619,9 +619,9 @@ def test_aggregate_with_boundary(self): scene1 = self._create_test_data(x_size, y_size) - with pytest.raises(ValueError): - scene1.aggregate(func='sum', x=2, y=2, boundary='exact') + with pytest.raises(ValueError, match="Could not coarsen a dimension.*"): + scene1.aggregate(func="sum", x=2, y=2, boundary="exact") - scene2 = scene1.aggregate(func='sum', x=2, y=2, boundary='trim') + scene2 = scene1.aggregate(func="sum", x=2, y=2, boundary="trim") expected_aggregated_shape = (y_size // 2, x_size // 2) self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size) diff --git a/satpy/tests/scene_tests/test_saving.py b/satpy/tests/scene_tests/test_saving.py index 188d9c1e75..32c6ff61c2 100644 --- a/satpy/tests/scene_tests/test_saving.py +++ b/satpy/tests/scene_tests/test_saving.py @@ -37,69 +37,70 @@ def test_save_datasets_default(self, tmp_path): """Save a dataset using 'save_datasets'.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime(2018, 1, 1, 0, 0, 0)} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() - scn['test'] = ds1 + scn["test"] = ds1 scn.save_datasets(base_dir=tmp_path) - assert os.path.isfile(os.path.join(tmp_path, 'test_20180101_000000.tif')) + assert os.path.isfile(os.path.join(tmp_path, "test_20180101_000000.tif")) def test_save_datasets_by_ext(self, tmp_path): """Save a dataset using 'save_datasets' with 'filename'.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime(2018, 1, 1, 0, 0, 0)} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() - scn['test'] = ds1 + scn["test"] = ds1 from satpy.writers.simple_image import PillowWriter save_image_mock = spy_decorator(PillowWriter.save_image) - with mock.patch.object(PillowWriter, 'save_image', save_image_mock): - scn.save_datasets(base_dir=tmp_path, filename='{name}.png') + with mock.patch.object(PillowWriter, "save_image", save_image_mock): + scn.save_datasets(base_dir=tmp_path, filename="{name}.png") save_image_mock.mock.assert_called_once() - assert os.path.isfile(os.path.join(tmp_path, 'test.png')) + assert os.path.isfile(os.path.join(tmp_path, "test.png")) def test_save_datasets_bad_writer(self, tmp_path): """Save a dataset using 'save_datasets' and a bad writer.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow()} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow()} ) scn = Scene() - scn['test'] = ds1 + scn["test"] = ds1 pytest.raises(ValueError, scn.save_datasets, - writer='_bad_writer_', - base_dir=tmp_path) + writer="_bad_writer_", + base_dir=tmp_path, + match="Unknown writer '_bad_writer_'") def test_save_datasets_missing_wishlist(self, tmp_path): """Calling 'save_datasets' with no valid datasets.""" scn = Scene() - scn._wishlist.add(make_cid(name='true_color')) + 
scn._wishlist.add(make_cid(name="true_color")) pytest.raises(RuntimeError, scn.save_datasets, - writer='geotiff', + writer="geotiff", base_dir=tmp_path) pytest.raises(KeyError, scn.save_datasets, - datasets=['no_exist']) + datasets=["no_exist"]) def test_save_dataset_default(self, tmp_path): """Save a dataset using 'save_dataset'.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime(2018, 1, 1, 0, 0, 0)} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime(2018, 1, 1, 0, 0, 0)} ) scn = Scene() - scn['test'] = ds1 - scn.save_dataset('test', base_dir=tmp_path) - assert os.path.isfile(os.path.join(tmp_path, 'test_20180101_000000.tif')) + scn["test"] = ds1 + scn.save_dataset("test", base_dir=tmp_path) + assert os.path.isfile(os.path.join(tmp_path, "test_20180101_000000.tif")) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index f056d2fa93..a872ce31c4 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -39,24 +39,24 @@ class TestMatchDataArrays(unittest.TestCase): """Test the utility method 'match_data_arrays'.""" - def _get_test_ds(self, shape=(50, 100), dims=('y', 'x')): + def _get_test_ds(self, shape=(50, 100), dims=("y", "x")): """Get a fake DataArray.""" from pyresample.geometry import AreaDefinition data = da.random.random(shape, chunks=25) area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, - shape[dims.index('x')], shape[dims.index('y')], + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, + shape[dims.index("x")], shape[dims.index("y")], (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) - attrs = {'area': area} + attrs = {"area": area} return xr.DataArray(data, dims=dims, attrs=attrs) def test_single_ds(self): """Test a single dataset is returned unharmed.""" from satpy.composites import CompositeBase ds1 = self._get_test_ds() - comp = CompositeBase('test_comp') + comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays((ds1,)) assert ret_datasets[0].identical(ds1) @@ -65,7 +65,7 @@ def test_mult_ds_area(self): from satpy.composites import CompositeBase ds1 = self._get_test_ds() ds2 = self._get_test_ds() - comp = CompositeBase('test_comp') + comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays((ds1, ds2)) assert ret_datasets[0].identical(ds1) assert ret_datasets[1].identical(ds2) @@ -75,8 +75,8 @@ def test_mult_ds_no_area(self): from satpy.composites import CompositeBase ds1 = self._get_test_ds() ds2 = self._get_test_ds() - del ds2.attrs['area'] - comp = CompositeBase('test_comp') + del ds2.attrs["area"] + comp = CompositeBase("test_comp") self.assertRaises(ValueError, comp.match_data_arrays, (ds1, ds2)) def test_mult_ds_diff_area(self): @@ -86,13 +86,13 @@ def test_mult_ds_diff_area(self): from satpy.composites import CompositeBase, IncompatibleAreas ds1 = self._get_test_ds() ds2 = self._get_test_ds() - ds2.attrs['area'] = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, + ds2.attrs["area"] = AreaDefinition( + "test", "test", "test", + {"proj": "eqc", "lon_0": 0.0, + "lat_0": 0.0}, 100, 50, (-30037508.34, -20018754.17, 10037508.34, 18754.17)) - comp = CompositeBase('test_comp') + comp = CompositeBase("test_comp") self.assertRaises(IncompatibleAreas, comp.match_data_arrays, (ds1, ds2)) def test_mult_ds_diff_dims(self): @@ -101,9 +101,9 @@ def test_mult_ds_diff_dims(self): # x is still 50, y 
is still 100, even though they are in # different order - ds1 = self._get_test_ds(shape=(50, 100), dims=('y', 'x')) - ds2 = self._get_test_ds(shape=(3, 100, 50), dims=('bands', 'x', 'y')) - comp = CompositeBase('test_comp') + ds1 = self._get_test_ds(shape=(50, 100), dims=("y", "x")) + ds2 = self._get_test_ds(shape=(3, 100, 50), dims=("bands", "x", "y")) + comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays((ds1, ds2)) assert ret_datasets[0].identical(ds1) assert ret_datasets[1].identical(ds2) @@ -114,19 +114,19 @@ def test_mult_ds_diff_size(self): # x is 50 in this one, 100 in ds2 # y is 100 in this one, 50 in ds2 - ds1 = self._get_test_ds(shape=(50, 100), dims=('x', 'y')) - ds2 = self._get_test_ds(shape=(3, 50, 100), dims=('bands', 'y', 'x')) - comp = CompositeBase('test_comp') + ds1 = self._get_test_ds(shape=(50, 100), dims=("x", "y")) + ds2 = self._get_test_ds(shape=(3, 50, 100), dims=("bands", "y", "x")) + comp = CompositeBase("test_comp") self.assertRaises(IncompatibleAreas, comp.match_data_arrays, (ds1, ds2)) def test_nondimensional_coords(self): """Test the removal of non-dimensional coordinates when compositing.""" from satpy.composites import CompositeBase ds = self._get_test_ds(shape=(2, 2)) - ds['acq_time'] = ('y', [0, 1]) - comp = CompositeBase('test_comp') + ds["acq_time"] = ("y", [0, 1]) + comp = CompositeBase("test_comp") ret_datasets = comp.match_data_arrays([ds, ds]) - self.assertNotIn('acq_time', ret_datasets[0].coords) + assert "acq_time" not in ret_datasets[0].coords class TestRatioSharpenedCompositors: @@ -135,89 +135,89 @@ class TestRatioSharpenedCompositors: def setup_method(self): """Create test data.""" from pyresample.geometry import AreaDefinition - area = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 2, + area = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) - attrs = {'area': area, - 'start_time': datetime(2018, 1, 1, 18), - 'modifiers': tuple(), - 'resolution': 1000, - 'calibration': 'reflectance', - 'units': '%', - 'name': 'test_vis'} + attrs = {"area": area, + "start_time": datetime(2018, 1, 1, 18), + "modifiers": tuple(), + "resolution": 1000, + "calibration": "reflectance", + "units": "%", + "name": "test_vis"} low_res_data = np.ones((2, 2), dtype=np.float64) + 4 low_res_data[1, 1] = 0.0 # produces infinite ratio ds1 = xr.DataArray(da.from_array(low_res_data, chunks=2), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) self.ds1 = ds1 ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2, - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) - ds2.attrs['name'] += '2' + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) + ds2.attrs["name"] += "2" self.ds2 = ds2 ds3 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 3, - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) - ds3.attrs['name'] += '3' + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) + ds3.attrs["name"] += "3" self.ds3 = ds3 # high resolution version high_res_data = np.ones((2, 2), dtype=np.float64) high_res_data[1, 0] = np.nan # invalid value in one band ds4 = xr.DataArray(da.from_array(high_res_data, chunks=2), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) - ds4.attrs['name'] += '4' - ds4.attrs['resolution'] = 500 + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) + ds4.attrs["name"] += "4" + 
ds4.attrs["resolution"] = 500 self.ds4 = ds4 # high resolution version - but too big ds4_big = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64), - attrs=attrs.copy(), dims=('y', 'x'), - coords={'y': [0, 1, 2, 3], 'x': [0, 1, 2, 3]}) - ds4_big.attrs['name'] += '4' - ds4_big.attrs['resolution'] = 500 - ds4_big.attrs['rows_per_scan'] = 1 - ds4_big.attrs['area'] = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 4, 4, + attrs=attrs.copy(), dims=("y", "x"), + coords={"y": [0, 1, 2, 3], "x": [0, 1, 2, 3]}) + ds4_big.attrs["name"] += "4" + ds4_big.attrs["resolution"] = 500 + ds4_big.attrs["rows_per_scan"] = 1 + ds4_big.attrs["area"] = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 4, 4, (-2000, -2000, 2000, 2000)) self.ds4_big = ds4_big @pytest.mark.parametrize( "init_kwargs", [ - {'high_resolution_band': "bad", 'neutral_resolution_band': "red"}, - {'high_resolution_band': "red", 'neutral_resolution_band': "bad"} + {"high_resolution_band": "bad", "neutral_resolution_band": "red"}, + {"high_resolution_band": "red", "neutral_resolution_band": "bad"} ] ) def test_bad_colors(self, init_kwargs): """Test that only valid band colors can be provided.""" from satpy.composites import RatioSharpenedRGB - with pytest.raises(ValueError): - RatioSharpenedRGB(name='true_color', **init_kwargs) + with pytest.raises(ValueError, match="RatioSharpenedRGB..*_band must be one of .*"): + RatioSharpenedRGB(name="true_color", **init_kwargs) def test_match_data_arrays(self): """Test that all areas have to be the same resolution.""" from satpy.composites import IncompatibleAreas, RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color') + comp = RatioSharpenedRGB(name="true_color") with pytest.raises(IncompatibleAreas): comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4_big,)) def test_more_than_three_datasets(self): """Test that only 3 datasets can be passed.""" from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color') - with pytest.raises(ValueError): + comp = RatioSharpenedRGB(name="true_color") + with pytest.raises(ValueError, match="Expected 3 datasets, got 4"): comp((self.ds1, self.ds2, self.ds3, self.ds1), optional_datasets=(self.ds4_big,)) def test_self_sharpened_no_high_res(self): """Test for exception when no high_res band is specified.""" from satpy.composites import SelfSharpenedRGB - comp = SelfSharpenedRGB(name='true_color', high_resolution_band=None) - with pytest.raises(ValueError): + comp = SelfSharpenedRGB(name="true_color", high_resolution_band=None) + with pytest.raises(ValueError, match="SelfSharpenedRGB requires at least one high resolution band, not 'None'"): comp((self.ds1, self.ds2, self.ds3)) def test_basic_no_high_res(self): @@ -266,7 +266,7 @@ def test_basic_no_sharpen(self): def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, exp_r, exp_g, exp_b): """Test RatioSharpenedRGB by different groups of high_resolution_band and neutral_resolution_band.""" from satpy.composites import RatioSharpenedRGB - comp = RatioSharpenedRGB(name='true_color', high_resolution_band=high_resolution_band, + comp = RatioSharpenedRGB(name="true_color", high_resolution_band=high_resolution_band, neutral_resolution_band=neutral_resolution_band) res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) @@ -291,7 +291,7 @@ def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, e def test_self_sharpened_basic(self, exp_shape, exp_r, exp_g, exp_b): """Test that three datasets 
can be passed without optional high res.""" from satpy.composites import SelfSharpenedRGB - comp = SelfSharpenedRGB(name='true_color') + comp = SelfSharpenedRGB(name="true_color") res = comp((self.ds1, self.ds2, self.ds3)) data = res.values @@ -307,62 +307,62 @@ class TestDifferenceCompositor(unittest.TestCase): def setUp(self): """Create test data.""" from pyresample.geometry import AreaDefinition - area = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 2, + area = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) - attrs = {'area': area, - 'start_time': datetime(2018, 1, 1, 18), - 'modifiers': tuple(), - 'resolution': 1000, - 'name': 'test_vis'} + attrs = {"area": area, + "start_time": datetime(2018, 1, 1, 18), + "modifiers": tuple(), + "resolution": 1000, + "name": "test_vis"} ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) self.ds1 = ds1 ds2 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 2, - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) - ds2.attrs['name'] += '2' + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) + ds2.attrs["name"] += "2" self.ds2 = ds2 # high res version ds2 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64) + 4, - attrs=attrs.copy(), dims=('y', 'x'), - coords={'y': [0, 1, 2, 3], 'x': [0, 1, 2, 3]}) - ds2.attrs['name'] += '2' - ds2.attrs['resolution'] = 500 - ds2.attrs['rows_per_scan'] = 1 - ds2.attrs['area'] = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 4, 4, + attrs=attrs.copy(), dims=("y", "x"), + coords={"y": [0, 1, 2, 3], "x": [0, 1, 2, 3]}) + ds2.attrs["name"] += "2" + ds2.attrs["resolution"] = 500 + ds2.attrs["rows_per_scan"] = 1 + ds2.attrs["area"] = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 4, 4, (-2000, -2000, 2000, 2000)) self.ds2_big = ds2 def test_basic_diff(self): """Test that a basic difference composite works.""" from satpy.composites import DifferenceCompositor - comp = DifferenceCompositor(name='diff', standard_name='temperature_difference') + comp = DifferenceCompositor(name="diff", standard_name="temperature_difference") res = comp((self.ds1, self.ds2)) np.testing.assert_allclose(res.values, -2) - assert res.attrs.get('standard_name') == 'temperature_difference' + assert res.attrs.get("standard_name") == "temperature_difference" def test_bad_areas_diff(self): """Test that a difference where resolutions are different fails.""" from satpy.composites import DifferenceCompositor, IncompatibleAreas - comp = DifferenceCompositor(name='diff') + comp = DifferenceCompositor(name="diff") # too many arguments self.assertRaises(ValueError, comp, (self.ds1, self.ds2, self.ds2_big)) # different resolution self.assertRaises(IncompatibleAreas, comp, (self.ds1, self.ds2_big)) -@pytest.fixture +@pytest.fixture() def fake_area(): """Return a fake 2×2 area.""" from pyresample.geometry import create_area_def return create_area_def("skierffe", 4087, area_extent=[-5_000, -5_000, 5_000, 5_000], shape=(2, 2)) -@pytest.fixture +@pytest.fixture() def fake_dataset_pair(fake_area): """Return a fake pair of 2×2 datasets.""" ds1 = xr.DataArray(da.full((2, 2), 8, chunks=2, dtype=np.float32), attrs={"area": fake_area}) @@ -391,7 +391,7 @@ class TestDayNightCompositor(unittest.TestCase): def setUp(self): """Create test data.""" - bands = ['R', 'G', 'B'] + bands = ["R", "G", "B"] 
start_time = datetime(2018, 1, 1, 18, 0, 0) # RGB @@ -401,20 +401,20 @@ def setUp(self): a[:, 1, 0] = 0.3 a[:, 1, 1] = 0.4 a = da.from_array(a, a.shape) - self.data_a = xr.DataArray(a, attrs={'test': 'a', 'start_time': start_time}, - coords={'bands': bands}, dims=('bands', 'y', 'x')) + self.data_a = xr.DataArray(a, attrs={"test": "a", "start_time": start_time}, + coords={"bands": bands}, dims=("bands", "y", "x")) b = np.zeros((3, 2, 2), dtype=np.float64) b[:, 0, 0] = np.nan b[:, 0, 1] = 0.25 b[:, 1, 0] = 0.50 b[:, 1, 1] = 0.75 b = da.from_array(b, b.shape) - self.data_b = xr.DataArray(b, attrs={'test': 'b', 'start_time': start_time}, - coords={'bands': bands}, dims=('bands', 'y', 'x')) + self.data_b = xr.DataArray(b, attrs={"test": "b", "start_time": start_time}, + coords={"bands": bands}, dims=("bands", "y", "x")) sza = np.array([[80., 86.], [94., 100.]]) sza = da.from_array(sza, sza.shape) - self.sza = xr.DataArray(sza, dims=('y', 'x')) + self.sza = xr.DataArray(sza, dims=("y", "x")) # fake area my_area = AreaDefinition( @@ -423,15 +423,15 @@ def setUp(self): 2, 2, (-95.0, 40.0, -92.0, 43.0), ) - self.data_a.attrs['area'] = my_area - self.data_b.attrs['area'] = my_area + self.data_a.attrs["area"] = my_area + self.data_b.attrs["area"] = my_area # not used except to check that it matches the data arrays - self.sza.attrs['area'] = my_area + self.sza.attrs["area"] = my_area def test_daynight_sza(self): """Test compositor with both day and night portions when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_night") + comp = DayNightCompositor(name="dn_test", day_night="day_night") res = comp((self.data_a, self.data_b, self.sza)) res = res.compute() expected = np.array([[0., 0.22122352], [0.5, 1.]]) @@ -440,7 +440,7 @@ def test_daynight_sza(self): def test_daynight_area(self): """Test compositor both day and night portions when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_night") + comp = DayNightCompositor(name="dn_test", day_night="day_night") res = comp((self.data_a, self.data_b)) res = res.compute() expected_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) @@ -450,7 +450,7 @@ def test_daynight_area(self): def test_night_only_sza_with_alpha(self): """Test compositor with night portion with alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=True) + comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True) res = comp((self.data_b, self.sza)) res = res.compute() expected_red_channel = np.array([[np.nan, 0.], [0.5, 1.]]) @@ -461,17 +461,17 @@ def test_night_only_sza_with_alpha(self): def test_night_only_sza_without_alpha(self): """Test compositor with night portion without alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=False) + comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False) res = comp((self.data_a, self.sza)) res = res.compute() expected = np.array([[0., 0.11042631], [0.66835017, 1.]]) np.testing.assert_allclose(res.values[0], expected) - assert 'A' not in res.bands + assert "A" not in res.bands def test_night_only_area_with_alpha(self): """Test compositor with night portion with alpha band when SZA data is 
not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=True) + comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]]) @@ -482,17 +482,17 @@ def test_night_only_area_with_alpha(self): def test_night_only_area_without_alpha(self): """Test compositor with night portion without alpha band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="night_only", include_alpha=False) + comp = DayNightCompositor(name="dn_test", day_night="night_only", include_alpha=False) res = comp((self.data_b,)) res = res.compute() expected = np.array([[np.nan, 0.], [0., 0.]]) np.testing.assert_allclose(res.values[0], expected) - assert 'A' not in res.bands + assert "A" not in res.bands def test_day_only_sza_with_alpha(self): """Test compositor with day portion with alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) + comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_a, self.sza)) res = res.compute() expected_red_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) @@ -503,18 +503,18 @@ def test_day_only_sza_with_alpha(self): def test_day_only_sza_without_alpha(self): """Test compositor with day portion without alpha band when SZA data is included.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=False) + comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=False) res = comp((self.data_a, self.sza)) res = res.compute() expected_channel_data = np.array([[0., 0.22122352], [0., 0.]]) for i in range(3): np.testing.assert_allclose(res.values[i], expected_channel_data) - assert 'A' not in res.bands + assert "A" not in res.bands def test_day_only_area_with_alpha(self): """Test compositor with day portion with alpha_band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) + comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_a,)) res = res.compute() expected_l_channel = np.array([[0., 0.33164983], [0.66835017, 1.]]) @@ -525,7 +525,7 @@ def test_day_only_area_with_alpha(self): def test_day_only_area_with_alpha_and_missing_data(self): """Test compositor with day portion with alpha_band when SZA data is not provided and there is missing data.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=True) + comp = DayNightCompositor(name="dn_test", day_night="day_only", include_alpha=True) res = comp((self.data_b,)) res = res.compute() expected_l_channel = np.array([[np.nan, 0.], [0.5, 1.]]) @@ -536,12 +536,12 @@ def test_day_only_area_with_alpha_and_missing_data(self): def test_day_only_area_without_alpha(self): """Test compositor with day portion without alpha_band when SZA data is not provided.""" from satpy.composites import DayNightCompositor - comp = DayNightCompositor(name='dn_test', day_night="day_only", include_alpha=False) + comp = DayNightCompositor(name="dn_test", day_night="day_only", 
include_alpha=False) res = comp((self.data_a,)) res = res.compute() expected = np.array([[0., 0.33164983], [0.66835017, 1.]]) np.testing.assert_allclose(res.values[0], expected) - assert 'A' not in res.bands + assert "A" not in res.bands class TestFillingCompositor(unittest.TestCase): @@ -550,15 +550,15 @@ class TestFillingCompositor(unittest.TestCase): def test_fill(self): """Test filling.""" from satpy.composites import FillingCompositor - comp = FillingCompositor(name='fill_test') + comp = FillingCompositor(name="fill_test") filler = xr.DataArray(np.array([1, 2, 3, 4, 3, 2, 1])) red = xr.DataArray(np.array([1, 2, 3, np.nan, 3, 2, 1])) green = xr.DataArray(np.array([np.nan, 2, 3, 4, 3, 2, np.nan])) blue = xr.DataArray(np.array([4, 3, 2, 1, 2, 3, 4])) res = comp([filler, red, green, blue]) - np.testing.assert_allclose(res.sel(bands='R').data, filler.data) - np.testing.assert_allclose(res.sel(bands='G').data, filler.data) - np.testing.assert_allclose(res.sel(bands='B').data, blue.data) + np.testing.assert_allclose(res.sel(bands="R").data, filler.data) + np.testing.assert_allclose(res.sel(bands="G").data, filler.data) + np.testing.assert_allclose(res.sel(bands="B").data, blue.data) class TestMultiFiller(unittest.TestCase): @@ -567,7 +567,7 @@ class TestMultiFiller(unittest.TestCase): def test_fill(self): """Test filling.""" from satpy.composites import MultiFiller - comp = MultiFiller(name='fill_test') + comp = MultiFiller(name="fill_test") attrs = {"units": "K"} a = xr.DataArray(np.array([1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]), attrs=attrs.copy()) b = xr.DataArray(np.array([np.nan, 2, 3, np.nan, np.nan, np.nan, np.nan]), attrs=attrs.copy()) @@ -587,29 +587,29 @@ class TestLuminanceSharpeningCompositor(unittest.TestCase): def test_compositor(self): """Test luminance sharpening compositor.""" from satpy.composites import LuminanceSharpeningCompositor - comp = LuminanceSharpeningCompositor(name='test') + comp = LuminanceSharpeningCompositor(name="test") # Three shades of grey rgb_arr = np.array([1, 50, 100, 200, 1, 50, 100, 200, 1, 50, 100, 200]) rgb = xr.DataArray(rgb_arr.reshape((3, 2, 2)), - dims=['bands', 'y', 'x'], coords={'bands': ['R', 'G', 'B']}) + dims=["bands", "y", "x"], coords={"bands": ["R", "G", "B"]}) # 100 % luminance -> all result values ~1.0 lum = xr.DataArray(np.array([[100., 100.], [100., 100.]]), - dims=['y', 'x']) + dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 1., atol=1e-9) # 50 % luminance, all result values ~0.5 lum = xr.DataArray(np.array([[50., 50.], [50., 50.]]), - dims=['y', 'x']) + dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.5, atol=1e-9) # 30 % luminance, all result values ~0.3 lum = xr.DataArray(np.array([[30., 30.], [30., 30.]]), - dims=['y', 'x']) + dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.3, atol=1e-9) # 0 % luminance, all values ~0.0 lum = xr.DataArray(np.array([[0., 0.], [0., 0.]]), - dims=['y', 'x']) + dims=["y", "x"]) res = comp([lum, rgb]) np.testing.assert_allclose(res.data, 0.0, atol=1e-9) @@ -619,31 +619,31 @@ class TestSandwichCompositor: # Test RGB and RGBA @pytest.mark.parametrize( - "input_shape,bands", + ("input_shape", "bands"), [ - ((3, 2, 2), ['R', 'G', 'B']), - ((4, 2, 2), ['R', 'G', 'B', 'A']) + ((3, 2, 2), ["R", "G", "B"]), + ((4, 2, 2), ["R", "G", "B", "A"]) ] ) - @mock.patch('satpy.composites.enhance2dataset') + @mock.patch("satpy.composites.enhance2dataset") def test_compositor(self, e2d, input_shape, bands): """Test 
luminance sharpening compositor.""" from satpy.composites import SandwichCompositor rgb_arr = da.from_array(np.random.random(input_shape), chunks=2) - rgb = xr.DataArray(rgb_arr, dims=['bands', 'y', 'x'], - coords={'bands': bands}) + rgb = xr.DataArray(rgb_arr, dims=["bands", "y", "x"], + coords={"bands": bands}) lum_arr = da.from_array(100 * np.random.random((2, 2)), chunks=2) - lum = xr.DataArray(lum_arr, dims=['y', 'x']) + lum = xr.DataArray(lum_arr, dims=["y", "x"]) # Make enhance2dataset return unmodified dataset e2d.return_value = rgb - comp = SandwichCompositor(name='test') + comp = SandwichCompositor(name="test") res = comp([lum, rgb]) for band in rgb: - if band.bands != 'A': + if band.bands != "A": # Check compositor has modified this band np.testing.assert_allclose(res.loc[band.bands].to_numpy(), band.to_numpy() * lum_arr / 100.) @@ -661,32 +661,28 @@ class TestInlineComposites(unittest.TestCase): def test_inline_composites(self): """Test that inline composites are working.""" from satpy.composites.config_loader import load_compositor_configs_for_sensors - comps = load_compositor_configs_for_sensors(['visir'])[0] + comps = load_compositor_configs_for_sensors(["visir"])[0] # Check that "fog" product has all its prerequisites defined - keys = comps['visir'].keys() - fog = [comps['visir'][dsid] for dsid in keys if "fog" == dsid['name']][0] - self.assertEqual(fog.attrs['prerequisites'][0]['name'], '_fog_dep_0') - self.assertEqual(fog.attrs['prerequisites'][1]['name'], '_fog_dep_1') - self.assertEqual(fog.attrs['prerequisites'][2], 10.8) + keys = comps["visir"].keys() + fog = [comps["visir"][dsid] for dsid in keys if "fog" == dsid["name"]][0] + assert fog.attrs["prerequisites"][0]["name"] == "_fog_dep_0" + assert fog.attrs["prerequisites"][1]["name"] == "_fog_dep_1" + assert fog.attrs["prerequisites"][2] == 10.8 # Check that the sub-composite dependencies use wavelengths # (numeric values) - keys = comps['visir'].keys() - fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid['name']] - self.assertEqual(comps['visir'][fog_dep_ids[0]].attrs['prerequisites'], - [12.0, 10.8]) - self.assertEqual(comps['visir'][fog_dep_ids[1]].attrs['prerequisites'], - [10.8, 8.7]) + keys = comps["visir"].keys() + fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid["name"]] + assert comps["visir"][fog_dep_ids[0]].attrs["prerequisites"] == [12.0, 10.8] + assert comps["visir"][fog_dep_ids[1]].attrs["prerequisites"] == [10.8, 8.7] # Check the same for SEVIRI and verify channel names are used # in the sub-composite dependencies instead of wavelengths - comps = load_compositor_configs_for_sensors(['seviri'])[0] - keys = comps['seviri'].keys() - fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid['name']] - self.assertEqual(comps['seviri'][fog_dep_ids[0]].attrs['prerequisites'], - ['IR_120', 'IR_108']) - self.assertEqual(comps['seviri'][fog_dep_ids[1]].attrs['prerequisites'], - ['IR_108', 'IR_087']) + comps = load_compositor_configs_for_sensors(["seviri"])[0] + keys = comps["seviri"].keys() + fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid["name"]] + assert comps["seviri"][fog_dep_ids[0]].attrs["prerequisites"] == ["IR_120", "IR_108"] + assert comps["seviri"][fog_dep_ids[1]].attrs["prerequisites"] == ["IR_108", "IR_087"] class TestColormapCompositor(unittest.TestCase): @@ -695,23 +691,23 @@ class TestColormapCompositor(unittest.TestCase): def setUp(self): """Set up the test case.""" from satpy.composites import ColormapCompositor - self.colormap_compositor = 
ColormapCompositor('test_cmap_compositor') + self.colormap_compositor = ColormapCompositor("test_cmap_compositor") def test_build_colormap_with_int_data_and_without_meanings(self): """Test colormap building.""" palette = np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]) colormap, squeezed_palette = self.colormap_compositor.build_colormap(palette, np.uint8, {}) - self.assertTrue(np.allclose(colormap.values, [0, 1])) - self.assertTrue(np.allclose(squeezed_palette, palette / 255.0)) + assert np.allclose(colormap.values, [0, 1]) + assert np.allclose(squeezed_palette, palette / 255.0) def test_build_colormap_with_int_data_and_with_meanings(self): """Test colormap building.""" palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), - dims=['value', 'band']) - palette.attrs['palette_meanings'] = [2, 3, 4] + dims=["value", "band"]) + palette.attrs["palette_meanings"] = [2, 3, 4] colormap, squeezed_palette = self.colormap_compositor.build_colormap(palette, np.uint8, {}) - self.assertTrue(np.allclose(colormap.values, [2, 3, 4])) - self.assertTrue(np.allclose(squeezed_palette, palette / 255.0)) + assert np.allclose(colormap.values, [2, 3, 4]) + assert np.allclose(squeezed_palette, palette / 255.0) class TestPaletteCompositor(unittest.TestCase): @@ -720,12 +716,12 @@ class TestPaletteCompositor(unittest.TestCase): def test_call(self): """Test palette compositing.""" from satpy.composites import PaletteCompositor - cmap_comp = PaletteCompositor('test_cmap_compositor') + cmap_comp = PaletteCompositor("test_cmap_compositor") palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), - dims=['value', 'band']) - palette.attrs['palette_meanings'] = [2, 3, 4] + dims=["value", "band"]) + palette.attrs["palette_meanings"] = [2, 3, 4] - data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8)), dims=['y', 'x']) + data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8)), dims=["y", "x"]) res = cmap_comp([data, palette]) exp = np.array([[[1., 0.498039, 0.], [0., 0.498039, 1.]], @@ -733,7 +729,7 @@ def test_call(self): [0., 0.498039, 1.]], [[1., 0.498039, 0.], [0., 0.498039, 1.]]]) - self.assertTrue(np.allclose(res, exp)) + assert np.allclose(res, exp) class TestColorizeCompositor(unittest.TestCase): @@ -742,15 +738,15 @@ class TestColorizeCompositor(unittest.TestCase): def test_colorize_no_fill(self): """Test colorizing.""" from satpy.composites import ColorizeCompositor - colormap_composite = ColorizeCompositor('test_color_compositor') + colormap_composite = ColorizeCompositor("test_color_compositor") palette = xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), - dims=['value', 'band']) - palette.attrs['palette_meanings'] = [2, 3, 4] + dims=["value", "band"]) + palette.attrs["palette_meanings"] = [2, 3, 4] data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 4]], dtype=np.uint8), - dims=['y', 'x']) + dims=["y", "x"]) res = colormap_composite([data, palette]) exp = np.array([[[1., 0.498039, 0.], [0., 0.498039, 1.]], @@ -758,20 +754,20 @@ def test_colorize_no_fill(self): [0., 0.498039, 1.]], [[1., 0.498039, 0.], [0., 0.498039, 1.]]]) - self.assertTrue(np.allclose(res, exp, atol=1e-4)) + assert np.allclose(res, exp, atol=0.0001) def test_colorize_with_interpolation(self): """Test colorizing with interpolation.""" from satpy.composites import ColorizeCompositor - colormap_composite = ColorizeCompositor('test_color_compositor') + colormap_composite = ColorizeCompositor("test_color_compositor") palette 
= xr.DataArray(np.array([[0, 0, 0], [127, 127, 127], [255, 255, 255]]), - dims=['value', 'band']) - palette.attrs['palette_meanings'] = [2, 3, 4] + dims=["value", "band"]) + palette.attrs["palette_meanings"] = [2, 3, 4] data = xr.DataArray(da.from_array(np.array([[4, 3, 2.5], [2, 3.2, 4]])), - dims=['y', 'x'], - attrs={'valid_range': np.array([2, 4])}) + dims=["y", "x"], + attrs={"valid_range": np.array([2, 4])}) res = colormap_composite([data, palette]) exp = np.array([[[1.0, 0.498039, 0.246575], [0., 0.59309977, 1.0]], @@ -788,7 +784,7 @@ class TestCloudCompositorWithoutCloudfree: def setup_method(self): """Set up the test case.""" from satpy.composites.cloud_products import CloudCompositorWithoutCloudfree - self.colormap_composite = CloudCompositorWithoutCloudfree('test_cmap_compositor') + self.colormap_composite = CloudCompositorWithoutCloudfree("test_cmap_compositor") self.exp = np.array([[4, 3, 2], [2, 3, np.nan], [8, 7, 655350]]) self.exp_bad_oc = np.array([[4, 3, 2], @@ -797,45 +793,45 @@ def setup_method(self): def test_call_numpy_with_invalid_value_in_status(self): """Test the CloudCompositorWithoutCloudfree composite generation.""" - status = xr.DataArray(np.array([[0, 0, 0], [0, 0, 65535], [0, 0, 1]]), dims=['y', 'x'], - attrs={'_FillValue': 65535}) + status = xr.DataArray(np.array([[0, 0, 0], [0, 0, 65535], [0, 0, 1]]), dims=["y", "x"], + attrs={"_FillValue": 65535}) data = xr.DataArray(np.array([[4, 3, 2], [2, 3, np.nan], [8, 7, np.nan]], dtype=np.float32), - dims=['y', 'x'], - attrs={'_FillValue': 65535, - 'scaled_FillValue': 655350}) + dims=["y", "x"], + attrs={"_FillValue": 65535, + "scaled_FillValue": 655350}) res = self.colormap_composite([data, status]) np.testing.assert_allclose(res, self.exp, atol=1e-4) def test_call_dask_with_invalid_value_in_status(self): """Test the CloudCompositorWithoutCloudfree composite generation.""" - status = xr.DataArray(da.from_array(np.array([[0, 0, 0], [0, 0, 65535], [0, 0, 1]])), dims=['y', 'x'], - attrs={'_FillValue': 65535}) + status = xr.DataArray(da.from_array(np.array([[0, 0, 0], [0, 0, 65535], [0, 0, 1]])), dims=["y", "x"], + attrs={"_FillValue": 65535}) data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, np.nan], [8, 7, np.nan]], dtype=np.float32)), - dims=['y', 'x'], - attrs={'_FillValue': 99, - 'scaled_FillValue': 655350}) + dims=["y", "x"], + attrs={"_FillValue": 99, + "scaled_FillValue": 655350}) res = self.colormap_composite([data, status]) np.testing.assert_allclose(res, self.exp, atol=1e-4) def test_call_bad_optical_conditions(self): """Test the CloudCompositorWithoutCloudfree composite generation.""" - status = xr.DataArray(da.from_array(np.array([[0, 0, 0], [3, 3, 3], [0, 0, 1]])), dims=['y', 'x'], - attrs={'_FillValue': 65535, - "flag_meanings": 'bad_optical_conditions'}) + status = xr.DataArray(da.from_array(np.array([[0, 0, 0], [3, 3, 3], [0, 0, 1]])), dims=["y", "x"], + attrs={"_FillValue": 65535, + "flag_meanings": "bad_optical_conditions"}) data = xr.DataArray(np.array([[4, 3, 2], [2, 255, 4], [255, 7, 255]], dtype=np.uint8), - dims=['y', 'x'], - name='cmic_cre', - attrs={'_FillValue': 255, - 'scaled_FillValue': 255}) + dims=["y", "x"], + name="cmic_cre", + attrs={"_FillValue": 255, + "scaled_FillValue": 255}) res = self.colormap_composite([data, status]) np.testing.assert_allclose(res, self.exp_bad_oc, atol=1e-4) def test_bad_indata(self): """Test the CloudCompositorWithoutCloudfree composite generation without status.""" data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 4], [255, 7, 255]], 
dtype=np.uint8), - dims=['y', 'x'], - attrs={'_FillValue': 255, - 'scaled_FillValue': 255}) + dims=["y", "x"], + attrs={"_FillValue": 255, + "scaled_FillValue": 255}) np.testing.assert_raises(ValueError, self.colormap_composite, [data]) @@ -852,36 +848,36 @@ def setup_method(self): self.exp_b = np.array([[4, 3, 2], [2, 3, 255], [np.nan, np.nan, np.nan]]) - self.colormap_composite = CloudCompositorCommonMask('test_cmap_compositor') + self.colormap_composite = CloudCompositorCommonMask("test_cmap_compositor") def test_call_numpy(self): """Test the CloudCompositorCommonMask with numpy.""" - mask = xr.DataArray(np.array([[0, 0, 0], [1, 1, 1], [255, 255, 255]]), dims=['y', 'x'], - attrs={'_FillValue': 255}) + mask = xr.DataArray(np.array([[0, 0, 0], [1, 1, 1], [255, 255, 255]]), dims=["y", "x"], + attrs={"_FillValue": 255}) data = xr.DataArray(np.array([[4, 3, 2], [2, 3, np.nan], [np.nan, np.nan, np.nan]], dtype=np.float32), - dims=['y', 'x'], - attrs={'_FillValue': 65535, - 'scaled_FillValue': 655350}) + dims=["y", "x"], + attrs={"_FillValue": 65535, + "scaled_FillValue": 655350}) res = self.colormap_composite([data, mask]) np.testing.assert_allclose(res, self.exp_a, atol=1e-4) def test_call_dask(self): """Test the CloudCompositorCommonMask with dask.""" - mask = xr.DataArray(da.from_array(np.array([[0, 0, 0], [1, 1, 1], [255, 255, 255]])), dims=['y', 'x'], - attrs={'_FillValue': 255}) + mask = xr.DataArray(da.from_array(np.array([[0, 0, 0], [1, 1, 1], [255, 255, 255]])), dims=["y", "x"], + attrs={"_FillValue": 255}) data = xr.DataArray(da.from_array(np.array([[4, 3, 2], [2, 3, 255], [255, 255, 255]], dtype=np.int16)), - dims=['y', 'x'], - attrs={'_FillValue': 255, - 'scaled_FillValue': 255}) + dims=["y", "x"], + attrs={"_FillValue": 255, + "scaled_FillValue": 255}) res = self.colormap_composite([data, mask]) np.testing.assert_allclose(res, self.exp_b, atol=1e-4) def test_bad_call(self): """Test the CloudCompositorCommonMask without mask.""" data = xr.DataArray(np.array([[4, 3, 2], [2, 3, 255], [255, 255, 255]], dtype=np.int16), - dims=['y', 'x'], - attrs={'_FillValue': 255, - 'scaled_FillValue': 255}) + dims=["y", "x"], + attrs={"_FillValue": 255, + "scaled_FillValue": 255}) np.testing.assert_raises(ValueError, self.colormap_composite, [data]) @@ -891,16 +887,16 @@ class TestPrecipCloudsCompositor(unittest.TestCase): def test_call(self): """Test the precip composite generation.""" from satpy.composites.cloud_products import PrecipCloudsRGB - colormap_compositor = PrecipCloudsRGB('test_precip_compositor') + colormap_compositor = PrecipCloudsRGB("test_precip_compositor") data_light = xr.DataArray(np.array([[80, 70, 60, 0], [20, 30, 40, 255]], dtype=np.uint8), - dims=['y', 'x'], attrs={'_FillValue': 255}) + dims=["y", "x"], attrs={"_FillValue": 255}) data_moderate = xr.DataArray(np.array([[60, 50, 40, 0], [20, 30, 40, 255]], dtype=np.uint8), - dims=['y', 'x'], attrs={'_FillValue': 255}) + dims=["y", "x"], attrs={"_FillValue": 255}) data_intense = xr.DataArray(np.array([[40, 30, 20, 0], [20, 30, 40, 255]], dtype=np.uint8), - dims=['y', 'x'], attrs={'_FillValue': 255}) + dims=["y", "x"], attrs={"_FillValue": 255}) data_flags = xr.DataArray(np.array([[0, 0, 4, 0], [0, 0, 0, 0]], dtype=np.uint8), - dims=['y', 'x']) + dims=["y", "x"]) res = colormap_compositor([data_light, data_moderate, data_intense, data_flags]) exp = np.array([[[0.24313725, 0.18235294, 0.12156863, np.nan], @@ -919,35 +915,35 @@ class TestSingleBandCompositor(unittest.TestCase): def setUp(self): """Create test data.""" from 
satpy.composites import SingleBandCompositor - self.comp = SingleBandCompositor(name='test') + self.comp = SingleBandCompositor(name="test") all_valid = np.ones((2, 2)) - self.all_valid = xr.DataArray(all_valid, dims=['y', 'x']) + self.all_valid = xr.DataArray(all_valid, dims=["y", "x"]) def test_call(self): """Test calling the compositor.""" # Dataset with extra attributes all_valid = self.all_valid - all_valid.attrs['sensor'] = 'foo' + all_valid.attrs["sensor"] = "foo" attrs = { - 'foo': 'bar', - 'resolution': 333, - 'units': 'K', - 'sensor': {'fake_sensor1', 'fake_sensor2'}, - 'calibration': 'BT', - 'wavelength': 10.8 + "foo": "bar", + "resolution": 333, + "units": "K", + "sensor": {"fake_sensor1", "fake_sensor2"}, + "calibration": "BT", + "wavelength": 10.8 } - self.comp.attrs['resolution'] = None + self.comp.attrs["resolution"] = None res = self.comp([all_valid], **attrs) # Verify attributes - self.assertEqual(res.attrs.get('sensor'), 'foo') - self.assertTrue('foo' in res.attrs) - self.assertEqual(res.attrs.get('foo'), 'bar') - self.assertTrue('units' in res.attrs) - self.assertTrue('calibration' in res.attrs) - self.assertFalse('modifiers' in res.attrs) - self.assertEqual(res.attrs['wavelength'], 10.8) - self.assertEqual(res.attrs['resolution'], 333) + assert res.attrs.get("sensor") == "foo" + assert "foo" in res.attrs + assert res.attrs.get("foo") == "bar" + assert "units" in res.attrs + assert "calibration" in res.attrs + assert "modifiers" not in res.attrs + assert res.attrs["wavelength"] == 10.8 + assert res.attrs["resolution"] == 333 class TestCategoricalDataCompositor(unittest.TestCase): @@ -955,9 +951,9 @@ class TestCategoricalDataCompositor(unittest.TestCase): def setUp(self): """Create test data.""" - attrs = {'name': 'foo'} + attrs = {"name": "foo"} data = xr.DataArray(da.from_array([[2., 1.], [3., 0.]]), attrs=attrs, - dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]}) + dims=("y", "x"), coords={"y": [0, 1], "x": [0, 1]}) self.data = data @@ -965,20 +961,20 @@ def test_basic_recategorization(self): """Test general functionality of compositor incl. 
attributes.""" from satpy.composites import CategoricalDataCompositor lut = [np.nan, 0, 1, 1] - name = 'bar' + name = "bar" comp = CategoricalDataCompositor(name=name, lut=lut) res = comp([self.data]) res = res.compute() expected = np.array([[1., 0.], [1., np.nan]]) np.testing.assert_equal(res.values, expected) - np.testing.assert_equal(res.attrs['name'], name) - np.testing.assert_equal(res.attrs['composite_lut'], lut) + np.testing.assert_equal(res.attrs["name"], name) + np.testing.assert_equal(res.attrs["composite_lut"], lut) def test_too_many_datasets(self): """Test that ValueError is raised if more than one dataset is provided.""" from satpy.composites import CategoricalDataCompositor lut = [np.nan, 0, 1, 1] - comp = CategoricalDataCompositor(name='foo', lut=lut) + comp = CategoricalDataCompositor(name="foo", lut=lut) np.testing.assert_raises(ValueError, comp, [self.data, self.data]) @@ -988,19 +984,19 @@ class TestGenericCompositor(unittest.TestCase): def setUp(self): """Create test data.""" from satpy.composites import GenericCompositor - self.comp = GenericCompositor(name='test') - self.comp2 = GenericCompositor(name='test2', common_channel_mask=False) + self.comp = GenericCompositor(name="test") + self.comp2 = GenericCompositor(name="test2", common_channel_mask=False) all_valid = np.ones((1, 2, 2)) - self.all_valid = xr.DataArray(all_valid, dims=['bands', 'y', 'x']) + self.all_valid = xr.DataArray(all_valid, dims=["bands", "y", "x"]) first_invalid = np.reshape(np.array([np.nan, 1., 1., 1.]), (1, 2, 2)) self.first_invalid = xr.DataArray(first_invalid, - dims=['bands', 'y', 'x']) + dims=["bands", "y", "x"]) second_invalid = np.reshape(np.array([1., np.nan, 1., 1.]), (1, 2, 2)) self.second_invalid = xr.DataArray(second_invalid, - dims=['bands', 'y', 'x']) + dims=["bands", "y", "x"]) wrong_shape = np.reshape(np.array([1., 1., 1.]), (1, 3, 1)) - self.wrong_shape = xr.DataArray(wrong_shape, dims=['bands', 'y', 'x']) + self.wrong_shape = xr.DataArray(wrong_shape, dims=["bands", "y", "x"]) def test_masking(self): """Test masking in generic compositor.""" @@ -1021,49 +1017,49 @@ def test_masking(self): def test_concat_datasets(self): """Test concatenation of datasets.""" from satpy.composites import IncompatibleAreas - res = self.comp._concat_datasets([self.all_valid], 'L') + res = self.comp._concat_datasets([self.all_valid], "L") num_bands = len(res.bands) - self.assertEqual(num_bands, 1) - self.assertEqual(res.shape[0], num_bands) - self.assertEqual(res.bands[0], 'L') - res = self.comp._concat_datasets([self.all_valid, self.all_valid], 'LA') + assert num_bands == 1 + assert res.shape[0] == num_bands + assert res.bands[0] == "L" + res = self.comp._concat_datasets([self.all_valid, self.all_valid], "LA") num_bands = len(res.bands) - self.assertEqual(num_bands, 2) - self.assertEqual(res.shape[0], num_bands) - self.assertEqual(res.bands[0], 'L') - self.assertEqual(res.bands[1], 'A') + assert num_bands == 2 + assert res.shape[0] == num_bands + assert res.bands[0] == "L" + assert res.bands[1] == "A" self.assertRaises(IncompatibleAreas, self.comp._concat_datasets, - [self.all_valid, self.wrong_shape], 'LA') + [self.all_valid, self.wrong_shape], "LA") def test_get_sensors(self): """Test getting sensors from the dataset attributes.""" res = self.comp._get_sensors([self.all_valid]) - self.assertIsNone(res) + assert res is None dset1 = self.all_valid - dset1.attrs['sensor'] = 'foo' + dset1.attrs["sensor"] = "foo" res = self.comp._get_sensors([dset1]) - self.assertEqual(res, 'foo') + assert res == 
"foo" dset2 = self.first_invalid - dset2.attrs['sensor'] = 'bar' + dset2.attrs["sensor"] = "bar" res = self.comp._get_sensors([dset1, dset2]) - self.assertIn('foo', res) - self.assertIn('bar', res) - self.assertEqual(len(res), 2) - self.assertIsInstance(res, set) - - @mock.patch('satpy.composites.GenericCompositor._get_sensors') - @mock.patch('satpy.composites.combine_metadata') - @mock.patch('satpy.composites.check_times') - @mock.patch('satpy.composites.GenericCompositor.match_data_arrays') + assert "foo" in res + assert "bar" in res + assert len(res) == 2 + assert isinstance(res, set) + + @mock.patch("satpy.composites.GenericCompositor._get_sensors") + @mock.patch("satpy.composites.combine_metadata") + @mock.patch("satpy.composites.check_times") + @mock.patch("satpy.composites.GenericCompositor.match_data_arrays") def test_call_with_mock(self, match_data_arrays, check_times, combine_metadata, get_sensors): """Test calling generic compositor.""" from satpy.composites import IncompatibleAreas combine_metadata.return_value = dict() - get_sensors.return_value = 'foo' + get_sensors.return_value = "foo" # One dataset, no mode given res = self.comp([self.all_valid]) - self.assertEqual(res.shape[0], 1) - self.assertEqual(res.attrs['mode'], 'L') + assert res.shape[0] == 1 + assert res.attrs["mode"] == "L" match_data_arrays.assert_not_called() # This compositor has been initialized without common masking, so the # masking shouldn't have been called @@ -1088,25 +1084,25 @@ def test_call(self): """Test calling generic compositor.""" # Multiple datasets with extra attributes all_valid = self.all_valid - all_valid.attrs['sensor'] = 'foo' - attrs = {'foo': 'bar', 'resolution': 333} - self.comp.attrs['resolution'] = None + all_valid.attrs["sensor"] = "foo" + attrs = {"foo": "bar", "resolution": 333} + self.comp.attrs["resolution"] = None res = self.comp([self.all_valid, self.first_invalid], **attrs) # Verify attributes - self.assertEqual(res.attrs.get('sensor'), 'foo') - self.assertIn('foo', res.attrs) - self.assertEqual(res.attrs.get('foo'), 'bar') - self.assertNotIn('units', res.attrs) - self.assertNotIn('calibration', res.attrs) - self.assertNotIn('modifiers', res.attrs) - self.assertIsNone(res.attrs['wavelength']) - self.assertEqual(res.attrs['mode'], 'LA') - self.assertEqual(res.attrs['resolution'], 333) + assert res.attrs.get("sensor") == "foo" + assert "foo" in res.attrs + assert res.attrs.get("foo") == "bar" + assert "units" not in res.attrs + assert "calibration" not in res.attrs + assert "modifiers" not in res.attrs + assert res.attrs["wavelength"] is None + assert res.attrs["mode"] == "LA" + assert res.attrs["resolution"] == 333 def test_deprecation_warning(self): """Test deprecation warning for dcprecated composite recipes.""" - warning_message = 'foo is a deprecated composite. Use composite bar instead.' - self.comp.attrs['deprecation_warning'] = warning_message + warning_message = "foo is a deprecated composite. Use composite bar instead." 
+ self.comp.attrs["deprecation_warning"] = warning_message with pytest.warns(UserWarning, match=warning_message): self.comp([self.all_valid]) @@ -1119,72 +1115,72 @@ def test_add_bands_l_rgb(self): from satpy.composites import add_bands # L + RGB -> RGB - data = xr.DataArray(da.ones((1, 3, 3)), dims=('bands', 'y', 'x'), - coords={'bands': ['L']}) - new_bands = xr.DataArray(da.array(['R', 'G', 'B']), dims=('bands'), - coords={'bands': ['R', 'G', 'B']}) + data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), + coords={"bands": ["L"]}) + new_bands = xr.DataArray(da.array(["R", "G", "B"]), dims=("bands"), + coords={"bands": ["R", "G", "B"]}) res = add_bands(data, new_bands) - res_bands = ['R', 'G', 'B'] - self.assertEqual(res.attrs['mode'], ''.join(res_bands)) + res_bands = ["R", "G", "B"] + assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) - np.testing.assert_array_equal(res.coords['bands'], res_bands) + np.testing.assert_array_equal(res.coords["bands"], res_bands) def test_add_bands_l_rgba(self): """Test adding bands.""" from satpy.composites import add_bands # L + RGBA -> RGBA - data = xr.DataArray(da.ones((1, 3, 3)), dims=('bands', 'y', 'x'), - coords={'bands': ['L']}, attrs={'mode': 'L'}) - new_bands = xr.DataArray(da.array(['R', 'G', 'B', 'A']), dims=('bands'), - coords={'bands': ['R', 'G', 'B', 'A']}) + data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), + coords={"bands": ["L"]}, attrs={"mode": "L"}) + new_bands = xr.DataArray(da.array(["R", "G", "B", "A"]), dims=("bands"), + coords={"bands": ["R", "G", "B", "A"]}) res = add_bands(data, new_bands) - res_bands = ['R', 'G', 'B', 'A'] - self.assertEqual(res.attrs['mode'], ''.join(res_bands)) + res_bands = ["R", "G", "B", "A"] + assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) - np.testing.assert_array_equal(res.coords['bands'], res_bands) + np.testing.assert_array_equal(res.coords["bands"], res_bands) def test_add_bands_la_rgb(self): """Test adding bands.""" from satpy.composites import add_bands # LA + RGB -> RGBA - data = xr.DataArray(da.ones((2, 3, 3)), dims=('bands', 'y', 'x'), - coords={'bands': ['L', 'A']}, attrs={'mode': 'LA'}) - new_bands = xr.DataArray(da.array(['R', 'G', 'B']), dims=('bands'), - coords={'bands': ['R', 'G', 'B']}) + data = xr.DataArray(da.ones((2, 3, 3)), dims=("bands", "y", "x"), + coords={"bands": ["L", "A"]}, attrs={"mode": "LA"}) + new_bands = xr.DataArray(da.array(["R", "G", "B"]), dims=("bands"), + coords={"bands": ["R", "G", "B"]}) res = add_bands(data, new_bands) - res_bands = ['R', 'G', 'B', 'A'] - self.assertEqual(res.attrs['mode'], ''.join(res_bands)) + res_bands = ["R", "G", "B", "A"] + assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) - np.testing.assert_array_equal(res.coords['bands'], res_bands) + np.testing.assert_array_equal(res.coords["bands"], res_bands) def test_add_bands_rgb_rbga(self): """Test adding bands.""" from satpy.composites import add_bands # RGB + RGBA -> RGBA - data = xr.DataArray(da.ones((3, 3, 3)), dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}, - attrs={'mode': 'RGB'}) - new_bands = xr.DataArray(da.array(['R', 'G', 'B', 'A']), dims=('bands'), - coords={'bands': ['R', 'G', 'B', 'A']}) + data = xr.DataArray(da.ones((3, 3, 3)), dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}, + attrs={"mode": "RGB"}) + new_bands = xr.DataArray(da.array(["R", "G", "B", "A"]), dims=("bands"), + 
coords={"bands": ["R", "G", "B", "A"]}) res = add_bands(data, new_bands) - res_bands = ['R', 'G', 'B', 'A'] - self.assertEqual(res.attrs['mode'], ''.join(res_bands)) + res_bands = ["R", "G", "B", "A"] + assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) - np.testing.assert_array_equal(res.coords['bands'], res_bands) + np.testing.assert_array_equal(res.coords["bands"], res_bands) def test_add_bands_p_l(self): """Test adding bands.""" from satpy.composites import add_bands # P(RGBA) + L -> RGBA - data = xr.DataArray(da.ones((1, 3, 3)), dims=('bands', 'y', 'x'), - coords={'bands': ['P']}, - attrs={'mode': 'P'}) - new_bands = xr.DataArray(da.array(['L']), dims=('bands'), - coords={'bands': ['L']}) + data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), + coords={"bands": ["P"]}, + attrs={"mode": "P"}) + new_bands = xr.DataArray(da.array(["L"]), dims=("bands"), + coords={"bands": ["L"]}) with pytest.raises(NotImplementedError): add_bands(data, new_bands) @@ -1192,7 +1188,7 @@ def test_add_bands_p_l(self): class TestStaticImageCompositor(unittest.TestCase): """Test case for the static compositor.""" - @mock.patch('satpy.resample.get_area_def') + @mock.patch("satpy.resample.get_area_def") def test_init(self, get_area_def): """Test the initializiation of static compositor.""" from satpy.composites import StaticImageCompositor @@ -1203,24 +1199,24 @@ def test_init(self, get_area_def): # No area defined comp = StaticImageCompositor("name", filename="/foo.tif") - self.assertEqual(comp._cache_filename, "/foo.tif") - self.assertIsNone(comp.area) + assert comp._cache_filename == "/foo.tif" + assert comp.area is None # Area defined get_area_def.return_value = "bar" comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") - self.assertEqual(comp._cache_filename, "/foo.tif") - self.assertEqual(comp.area, "bar") + assert comp._cache_filename == "/foo.tif" + assert comp.area == "bar" get_area_def.assert_called_once_with("euro4") - @mock.patch('satpy.aux_download.retrieve') - @mock.patch('satpy.aux_download.register_file') - @mock.patch('satpy.Scene') + @mock.patch("satpy.aux_download.retrieve") + @mock.patch("satpy.aux_download.register_file") + @mock.patch("satpy.Scene") def test_call(self, Scene, register, retrieve): # noqa """Test the static compositing.""" from satpy.composites import StaticImageCompositor - satpy.config.set(data_dir=os.path.join(os.path.sep, 'path', 'to', 'image')) + satpy.config.set(data_dir=os.path.join(os.path.sep, "path", "to", "image")) remote_tif = "http://example.com/foo.tif" class MockScene(dict): @@ -1230,20 +1226,20 @@ def load(self, arg): img = mock.MagicMock() img.attrs = {} scn = MockScene() - scn['image'] = img + scn["image"] = img Scene.return_value = scn # absolute path to local file comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") res = comp() - Scene.assert_called_once_with(reader='generic_image', - filenames=['/foo.tif']) + Scene.assert_called_once_with(reader="generic_image", + filenames=["/foo.tif"]) register.assert_not_called() retrieve.assert_not_called() - self.assertIn("start_time", res.attrs) - self.assertIn("end_time", res.attrs) - self.assertIsNone(res.attrs['sensor']) - self.assertNotIn('modifiers', res.attrs) - self.assertNotIn('calibration', res.attrs) + assert "start_time" in res.attrs + assert "end_time" in res.attrs + assert res.attrs["sensor"] is None + assert "modifiers" not in res.attrs + assert "calibration" not in res.attrs # remote file with local 
cached version Scene.reset_mock() @@ -1251,42 +1247,41 @@ def load(self, arg): retrieve.return_value = "data_dir/foo.tif" comp = StaticImageCompositor("name", url=remote_tif, area="euro4") res = comp() - Scene.assert_called_once_with(reader='generic_image', - filenames=['data_dir/foo.tif']) - self.assertIn("start_time", res.attrs) - self.assertIn("end_time", res.attrs) - self.assertIsNone(res.attrs['sensor']) - self.assertNotIn('modifiers', res.attrs) - self.assertNotIn('calibration', res.attrs) + Scene.assert_called_once_with(reader="generic_image", + filenames=["data_dir/foo.tif"]) + assert "start_time" in res.attrs + assert "end_time" in res.attrs + assert res.attrs["sensor"] is None + assert "modifiers" not in res.attrs + assert "calibration" not in res.attrs # Non-georeferenced image, no area given - img.attrs.pop('area') + img.attrs.pop("area") comp = StaticImageCompositor("name", filename="/foo.tif") with self.assertRaises(AttributeError): comp() # Non-georeferenced image, area given - comp = StaticImageCompositor("name", filename="/foo.tif", area='euro4') + comp = StaticImageCompositor("name", filename="/foo.tif", area="euro4") res = comp() - self.assertEqual(res.attrs['area'].area_id, 'euro4') + assert res.attrs["area"].area_id == "euro4" # Filename contains environment variable os.environ["TEST_IMAGE_PATH"] = "/path/to/image" - comp = StaticImageCompositor("name", filename="${TEST_IMAGE_PATH}/foo.tif", area='euro4') - self.assertEqual(comp._cache_filename, "/path/to/image/foo.tif") + comp = StaticImageCompositor("name", filename="${TEST_IMAGE_PATH}/foo.tif", area="euro4") + assert comp._cache_filename == "/path/to/image/foo.tif" # URL and filename without absolute path comp = StaticImageCompositor("name", url=remote_tif, filename="bar.tif") - self.assertEqual(comp._url, remote_tif) - self.assertEqual(comp._cache_filename, "bar.tif") + assert comp._url == remote_tif + assert comp._cache_filename == "bar.tif" # No URL, filename without absolute path, use default data_dir from config - with mock.patch('os.path.exists') as exists: + with mock.patch("os.path.exists") as exists: exists.return_value = True comp = StaticImageCompositor("name", filename="foo.tif") - self.assertEqual(comp._url, None) - self.assertEqual(comp._cache_filename, - os.path.join(os.path.sep, 'path', 'to', 'image', 'foo.tif')) + assert comp._url is None + assert comp._cache_filename == os.path.join(os.path.sep, "path", "to", "image", "foo.tif") def _enhance2dataset(dataset, convert_p=False): @@ -1315,21 +1310,21 @@ def setup_class(cls): } cls.foreground_data = foreground_data - @mock.patch('satpy.composites.enhance2dataset', _enhance2dataset) + @mock.patch("satpy.composites.enhance2dataset", _enhance2dataset) @pytest.mark.parametrize( - ('foreground_bands', 'background_bands', 'exp_bands', 'exp_result'), + ("foreground_bands", "background_bands", "exp_bands", "exp_result"), [ - ('L', 'L', 'L', np.array([[1.0, 0.5], [0.0, 1.0]])), - ('LA', 'LA', 'L', np.array([[1.0, 0.75], [0.5, 1.0]])), - ('RGB', 'RGB', 'RGB', np.array([ + ("L", "L", "L", np.array([[1.0, 0.5], [0.0, 1.0]])), + ("LA", "LA", "L", np.array([[1.0, 0.75], [0.5, 1.0]])), + ("RGB", "RGB", "RGB", np.array([ [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]], [[1., 0.5], [0., 1.]]])), - ('RGBA', 'RGBA', 'RGB', np.array([ + ("RGBA", "RGBA", "RGB", np.array([ [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]]])), - ('RGBA', 'RGB', 'RGB', np.array([ + ("RGBA", "RGB", "RGB", np.array([ [[1., 0.75], [0.5, 1.]], [[1., 0.75], [0.5, 1.]], [[1., 
0.75], [0.5, 1.]]])), @@ -1342,119 +1337,119 @@ def test_call(self, foreground_bands, background_bands, exp_bands, exp_result): # L mode images foreground_data = self.foreground_data[foreground_bands] - attrs = {'mode': foreground_bands, 'area': 'foo'} + attrs = {"mode": foreground_bands, "area": "foo"} foreground = xr.DataArray(da.from_array(foreground_data), - dims=('bands', 'y', 'x'), - coords={'bands': [c for c in attrs['mode']]}, + dims=("bands", "y", "x"), + coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs) - attrs = {'mode': background_bands, 'area': 'foo'} - background = xr.DataArray(da.ones((len(background_bands), 2, 2)), dims=('bands', 'y', 'x'), - coords={'bands': [c for c in attrs['mode']]}, + attrs = {"mode": background_bands, "area": "foo"} + background = xr.DataArray(da.ones((len(background_bands), 2, 2)), dims=("bands", "y", "x"), + coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs) res = comp([foreground, background]) - assert res.attrs['area'] == 'foo' + assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, exp_result) - assert res.attrs['mode'] == exp_bands + assert res.attrs["mode"] == exp_bands - @mock.patch('satpy.composites.enhance2dataset', _enhance2dataset) + @mock.patch("satpy.composites.enhance2dataset", _enhance2dataset) def test_multiple_sensors(self): """Test the background compositing from multiple sensor data.""" from satpy.composites import BackgroundCompositor comp = BackgroundCompositor("name") # L mode images - attrs = {'mode': 'L', 'area': 'foo'} + attrs = {"mode": "L", "area": "foo"} foreground_data = self.foreground_data["L"] foreground = xr.DataArray(da.from_array(foreground_data), - dims=('bands', 'y', 'x'), - coords={'bands': [c for c in attrs['mode']]}, + dims=("bands", "y", "x"), + coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs.copy()) - foreground.attrs['sensor'] = 'abi' - background = xr.DataArray(da.ones((1, 2, 2)), dims=('bands', 'y', 'x'), - coords={'bands': [c for c in attrs['mode']]}, + foreground.attrs["sensor"] = "abi" + background = xr.DataArray(da.ones((1, 2, 2)), dims=("bands", "y", "x"), + coords={"bands": [c for c in attrs["mode"]]}, attrs=attrs.copy()) - background.attrs['sensor'] = 'glm' + background.attrs["sensor"] = "glm" res = comp([foreground, background]) - assert res.attrs['area'] == 'foo' + assert res.attrs["area"] == "foo" np.testing.assert_allclose(res, np.array([[1., 0.5], [0., 1.]])) - assert res.attrs['mode'] == 'L' - assert res.attrs['sensor'] == {'abi', 'glm'} + assert res.attrs["mode"] == "L" + assert res.attrs["sensor"] == {"abi", "glm"} class TestMaskingCompositor: """Test case for the simple masking compositor.""" - @pytest.fixture + @pytest.fixture() def conditions_v1(self): """Masking conditions with string values.""" - return [{'method': 'equal', - 'value': 'Cloud-free_land', - 'transparency': 100}, - {'method': 'equal', - 'value': 'Cloud-free_sea', - 'transparency': 50}] - - @pytest.fixture + return [{"method": "equal", + "value": "Cloud-free_land", + "transparency": 100}, + {"method": "equal", + "value": "Cloud-free_sea", + "transparency": 50}] + + @pytest.fixture() def conditions_v2(self): """Masking conditions with numerical values.""" - return [{'method': 'equal', - 'value': 1, - 'transparency': 100}, - {'method': 'equal', - 'value': 2, - 'transparency': 50}] - - @pytest.fixture + return [{"method": "equal", + "value": 1, + "transparency": 100}, + {"method": "equal", + "value": 2, + "transparency": 50}] + + @pytest.fixture() def test_data(self): """Test data to use 
with masking compositors.""" - return xr.DataArray(da.random.random((3, 3)), dims=['y', 'x']) + return xr.DataArray(da.random.random((3, 3)), dims=["y", "x"]) - @pytest.fixture + @pytest.fixture() def test_ct_data(self): """Test 2D CT data array.""" - flag_meanings = ['Cloud-free_land', 'Cloud-free_sea'] + flag_meanings = ["Cloud-free_land", "Cloud-free_sea"] flag_values = da.array([1, 2]) ct_data = da.array([[1, 2, 2], [2, 1, 2], [2, 2, 1]]) - ct_data = xr.DataArray(ct_data, dims=['y', 'x']) - ct_data.attrs['flag_meanings'] = flag_meanings - ct_data.attrs['flag_values'] = flag_values + ct_data = xr.DataArray(ct_data, dims=["y", "x"]) + ct_data.attrs["flag_meanings"] = flag_meanings + ct_data.attrs["flag_values"] = flag_values return ct_data - @pytest.fixture + @pytest.fixture() def test_ct_data_v3(self, test_ct_data): """Set ct data to NaN where it originally is 1.""" return test_ct_data.where(test_ct_data == 1) - @pytest.fixture + @pytest.fixture() def reference_data(self, test_data, test_ct_data): """Get reference data to use in masking compositor tests.""" # The data are set to NaN where ct is `1` return test_data.where(test_ct_data > 1) - @pytest.fixture + @pytest.fixture() def reference_alpha(self): """Get reference alpha to use in masking compositor tests.""" ref_alpha = da.array([[0, 0.5, 0.5], [0.5, 0, 0.5], [0.5, 0.5, 0]]) - return xr.DataArray(ref_alpha, dims=['y', 'x']) + return xr.DataArray(ref_alpha, dims=["y", "x"]) def test_init(self): """Test the initializiation of compositor.""" from satpy.composites import MaskingCompositor # No transparency or conditions given raises ValueError - with pytest.raises(ValueError): - comp = MaskingCompositor("name") + with pytest.raises(ValueError, match="Masking conditions not defined."): + _ = MaskingCompositor("name") # transparency defined transparency = {0: 100, 1: 50} - conditions = [{'method': 'equal', 'value': 0, 'transparency': 100}, - {'method': 'equal', 'value': 1, 'transparency': 50}] + conditions = [{"method": "equal", "value": 0, "transparency": 100}, + {"method": "equal", "value": 1, "transparency": 50}] comp = MaskingCompositor("name", transparency=transparency.copy()) - assert not hasattr(comp, 'transparency') + assert not hasattr(comp, "transparency") # Transparency should be converted to conditions assert comp.conditions == conditions @@ -1470,18 +1465,18 @@ def test_get_flag_value(self): mask = da.array([[1, 2, 2], [2, 1, 2], [2, 2, 1]]) - mask = xr.DataArray(mask, dims=['y', 'x']) - flag_meanings = ['Cloud-free_land', 'Cloud-free_sea'] - mask.attrs['flag_meanings'] = flag_meanings - mask.attrs['flag_values'] = flag_values + mask = xr.DataArray(mask, dims=["y", "x"]) + flag_meanings = ["Cloud-free_land", "Cloud-free_sea"] + mask.attrs["flag_meanings"] = flag_meanings + mask.attrs["flag_values"] = flag_values - assert _get_flag_value(mask, 'Cloud-free_land') == 1 - assert _get_flag_value(mask, 'Cloud-free_sea') == 2 + assert _get_flag_value(mask, "Cloud-free_land") == 1 + assert _get_flag_value(mask, "Cloud-free_sea") == 2 - flag_meanings_str = 'Cloud-free_land Cloud-free_sea' - mask.attrs['flag_meanings'] = flag_meanings_str - assert _get_flag_value(mask, 'Cloud-free_land') == 1 - assert _get_flag_value(mask, 'Cloud-free_sea') == 2 + flag_meanings_str = "Cloud-free_land Cloud-free_sea" + mask.attrs["flag_meanings"] = flag_meanings_str + assert _get_flag_value(mask, "Cloud-free_land") == 1 + assert _get_flag_value(mask, "Cloud-free_sea") == 2 @pytest.mark.parametrize("mode", ["LA", "RGBA"]) def 
test_call_numerical_transparency_data( @@ -1502,7 +1497,7 @@ def test_call_numerical_transparency_data( assert res.mode == mode for m in mode.rstrip("A"): np.testing.assert_allclose(res.sel(bands=m), reference_data) - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_call_named_fields(self, conditions_v2, test_data, test_ct_data, reference_data, reference_alpha): @@ -1514,8 +1509,8 @@ def test_call_named_fields(self, conditions_v2, test_data, test_ct_data, comp = MaskingCompositor("name", conditions=conditions_v2) res = comp([test_data, test_ct_data]) assert res.mode == "LA" - np.testing.assert_allclose(res.sel(bands='L'), reference_data) - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) + np.testing.assert_allclose(res.sel(bands="L"), reference_data) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_call_named_fields_string( self, conditions_v2, test_data, test_ct_data, reference_data, @@ -1524,14 +1519,14 @@ def test_call_named_fields_string( from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler - flag_meanings_str = 'Cloud-free_land Cloud-free_sea' - test_ct_data.attrs['flag_meanings'] = flag_meanings_str + flag_meanings_str = "Cloud-free_land Cloud-free_sea" + test_ct_data.attrs["flag_meanings"] = flag_meanings_str with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v2) res = comp([test_data, test_ct_data]) assert res.mode == "LA" - np.testing.assert_allclose(res.sel(bands='L'), reference_data) - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) + np.testing.assert_allclose(res.sel(bands="L"), reference_data) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_method_isnan(self, test_data, test_ct_data, test_ct_data_v3): @@ -1539,27 +1534,27 @@ def test_method_isnan(self, test_data, from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler - conditions_v3 = [{'method': 'isnan', 'transparency': 100}] + conditions_v3 = [{"method": "isnan", "transparency": 100}] # The data are set to NaN where ct is NaN reference_data_v3 = test_data.where(test_ct_data == 1) reference_alpha_v3 = da.array([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]]) - reference_alpha_v3 = xr.DataArray(reference_alpha_v3, dims=['y', 'x']) + reference_alpha_v3 = xr.DataArray(reference_alpha_v3, dims=["y", "x"]) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v3) res = comp([test_data, test_ct_data_v3]) assert res.mode == "LA" - np.testing.assert_allclose(res.sel(bands='L'), reference_data_v3) - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha_v3) + np.testing.assert_allclose(res.sel(bands="L"), reference_data_v3) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha_v3) def test_method_absolute_import(self, test_data, test_ct_data_v3): """Test "absolute_import" as method.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler - conditions_v4 = [{'method': 'absolute_import', 'transparency': 'satpy.resample'}] + conditions_v4 = [{"method": "absolute_import", "transparency": "satpy.resample"}] # This should raise AttributeError with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v4) @@ -1573,55 +1568,55 @@ def 
test_rgb_dataset(self, conditions_v1, test_ct_data, reference_alpha): # 3D data array data = xr.DataArray(da.random.random((3, 3, 3)), - dims=['bands', 'y', 'x'], - coords={'bands': ['R', 'G', 'B'], - 'y': np.arange(3), - 'x': np.arange(3)}) + dims=["bands", "y", "x"], + coords={"bands": ["R", "G", "B"], + "y": np.arange(3), + "x": np.arange(3)}) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v1) res = comp([data, test_ct_data]) assert res.mode == "RGBA" - np.testing.assert_allclose(res.sel(bands='R'), - data.sel(bands='R').where(test_ct_data > 1)) - np.testing.assert_allclose(res.sel(bands='G'), - data.sel(bands='G').where(test_ct_data > 1)) - np.testing.assert_allclose(res.sel(bands='B'), - data.sel(bands='B').where(test_ct_data > 1)) - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) + np.testing.assert_allclose(res.sel(bands="R"), + data.sel(bands="R").where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="G"), + data.sel(bands="G").where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="B"), + data.sel(bands="B").where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_rgba_dataset(self, conditions_v2, test_ct_data, reference_alpha): """Test RGBA dataset.""" from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler data = xr.DataArray(da.random.random((4, 3, 3)), - dims=['bands', 'y', 'x'], - coords={'bands': ['R', 'G', 'B', 'A'], - 'y': np.arange(3), - 'x': np.arange(3)}) + dims=["bands", "y", "x"], + coords={"bands": ["R", "G", "B", "A"], + "y": np.arange(3), + "x": np.arange(3)}) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v2) res = comp([data, test_ct_data]) assert res.mode == "RGBA" - np.testing.assert_allclose(res.sel(bands='R'), - data.sel(bands='R').where(test_ct_data > 1)) - np.testing.assert_allclose(res.sel(bands='G'), - data.sel(bands='G').where(test_ct_data > 1)) - np.testing.assert_allclose(res.sel(bands='B'), - data.sel(bands='B').where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="R"), + data.sel(bands="R").where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="G"), + data.sel(bands="G").where(test_ct_data > 1)) + np.testing.assert_allclose(res.sel(bands="B"), + data.sel(bands="B").where(test_ct_data > 1)) # The compositor should drop the original alpha band - np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) + np.testing.assert_allclose(res.sel(bands="A"), reference_alpha) def test_incorrect_method(self, test_data, test_ct_data): """Test incorrect method.""" from satpy.composites import MaskingCompositor - conditions = [{'method': 'foo', 'value': 0, 'transparency': 100}] + conditions = [{"method": "foo", "value": 0, "transparency": 100}] comp = MaskingCompositor("name", conditions=conditions) with pytest.raises(AttributeError): comp([test_data, test_ct_data]) # Test with too few projectables. - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Expected 2 datasets, got 1"): comp([test_data]) def test_incorrect_mode(self, conditions_v1): @@ -1629,7 +1624,7 @@ def test_incorrect_mode(self, conditions_v1): from satpy.composites import MaskingCompositor # Incorrect mode raises ValueError - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid mode YCbCrA. 
Supported modes: .*"): MaskingCompositor("name", conditions=conditions_v1, mode="YCbCrA") @@ -1646,12 +1641,12 @@ def setUp(self): self.ch08_w = 3.0 self.ch06_w = 4.0 - @mock.patch('satpy.composites.NaturalEnh.__repr__') - @mock.patch('satpy.composites.NaturalEnh.match_data_arrays') + @mock.patch("satpy.composites.NaturalEnh.__repr__") + @mock.patch("satpy.composites.NaturalEnh.match_data_arrays") def test_natural_enh(self, match_data_arrays, repr_): """Test NaturalEnh compositor.""" from satpy.composites import NaturalEnh - repr_.return_value = '' + repr_.return_value = "" projectables = [self.ch1, self.ch2, self.ch3] def temp_func(*args): @@ -1660,73 +1655,73 @@ def temp_func(*args): match_data_arrays.side_effect = temp_func comp = NaturalEnh("foo", ch16_w=self.ch16_w, ch08_w=self.ch08_w, ch06_w=self.ch06_w) - self.assertEqual(comp.ch16_w, self.ch16_w) - self.assertEqual(comp.ch08_w, self.ch08_w) - self.assertEqual(comp.ch06_w, self.ch06_w) + assert comp.ch16_w == self.ch16_w + assert comp.ch08_w == self.ch08_w + assert comp.ch06_w == self.ch06_w res = comp(projectables) assert mock.call(projectables) in match_data_arrays.mock_calls correct = (self.ch16_w * projectables[0] + self.ch08_w * projectables[1] + self.ch06_w * projectables[2]) - self.assertEqual(res[0], correct) - self.assertEqual(res[1], projectables[1]) - self.assertEqual(res[2], projectables[2]) + assert res[0] == correct + assert res[1] == projectables[1] + assert res[2] == projectables[2] class TestEnhance2Dataset(unittest.TestCase): """Test the enhance2dataset utility.""" - @mock.patch('satpy.composites.get_enhanced_image') + @mock.patch("satpy.composites.get_enhanced_image") def test_enhance_p_to_rgb(self, get_enhanced_image): """Test enhancing a paletted dataset in RGB mode.""" from trollimage.xrimage import XRImage - img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'), coords={'bands': ['P']})) + img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"), coords={"bands": ["P"]})) img.palette = ((0, 0, 0), (4, 4, 4), (8, 8, 8)) get_enhanced_image.return_value = img from satpy.composites import enhance2dataset dataset = xr.DataArray(np.ones((1, 20, 20))) res = enhance2dataset(dataset, convert_p=True) - assert res.attrs['mode'] == 'RGB' + assert res.attrs["mode"] == "RGB" - @mock.patch('satpy.composites.get_enhanced_image') + @mock.patch("satpy.composites.get_enhanced_image") def test_enhance_p_to_rgba(self, get_enhanced_image): """Test enhancing a paletted dataset in RGBA mode.""" from trollimage.xrimage import XRImage - img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'), coords={'bands': ['P']})) + img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"), coords={"bands": ["P"]})) img.palette = ((0, 0, 0, 255), (4, 4, 4, 255), (8, 8, 8, 255)) get_enhanced_image.return_value = img from satpy.composites import enhance2dataset dataset = xr.DataArray(np.ones((1, 20, 20))) res = enhance2dataset(dataset, convert_p=True) - assert res.attrs['mode'] == 'RGBA' + assert res.attrs["mode"] == "RGBA" - @mock.patch('satpy.composites.get_enhanced_image') + @mock.patch("satpy.composites.get_enhanced_image") def test_enhance_p(self, get_enhanced_image): """Test enhancing a paletted dataset in P mode.""" from trollimage.xrimage import XRImage - img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'), coords={'bands': ['P']})) + img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"), coords={"bands": 
["P"]})) img.palette = ((0, 0, 0, 255), (4, 4, 4, 255), (8, 8, 8, 255)) get_enhanced_image.return_value = img from satpy.composites import enhance2dataset dataset = xr.DataArray(np.ones((1, 20, 20))) res = enhance2dataset(dataset) - assert res.attrs['mode'] == 'P' + assert res.attrs["mode"] == "P" assert res.max().values == 2 - @mock.patch('satpy.composites.get_enhanced_image') + @mock.patch("satpy.composites.get_enhanced_image") def test_enhance_l(self, get_enhanced_image): """Test enhancing a paletted dataset in P mode.""" from trollimage.xrimage import XRImage - img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=('bands', 'y', 'x'), coords={'bands': ['L']})) + img = XRImage(xr.DataArray(np.ones((1, 20, 20)) * 2, dims=("bands", "y", "x"), coords={"bands": ["L"]})) get_enhanced_image.return_value = img from satpy.composites import enhance2dataset dataset = xr.DataArray(np.ones((1, 20, 20))) res = enhance2dataset(dataset) - assert res.attrs['mode'] == 'L' + assert res.attrs["mode"] == "L" assert res.max().values == 1 @@ -1736,29 +1731,29 @@ class TestInferMode(unittest.TestCase): def test_bands_coords_is_used(self): """Test that the `bands` coord is used.""" from satpy.composites import GenericCompositor - arr = xr.DataArray(np.ones((1, 5, 5)), dims=('bands', 'x', 'y'), coords={'bands': ['P']}) - assert GenericCompositor.infer_mode(arr) == 'P' + arr = xr.DataArray(np.ones((1, 5, 5)), dims=("bands", "x", "y"), coords={"bands": ["P"]}) + assert GenericCompositor.infer_mode(arr) == "P" - arr = xr.DataArray(np.ones((3, 5, 5)), dims=('bands', 'x', 'y'), coords={'bands': ['Y', 'Cb', 'Cr']}) - assert GenericCompositor.infer_mode(arr) == 'YCbCr' + arr = xr.DataArray(np.ones((3, 5, 5)), dims=("bands", "x", "y"), coords={"bands": ["Y", "Cb", "Cr"]}) + assert GenericCompositor.infer_mode(arr) == "YCbCr" def test_mode_is_used(self): """Test that the `mode` attribute is used.""" from satpy.composites import GenericCompositor - arr = xr.DataArray(np.ones((1, 5, 5)), dims=('bands', 'x', 'y'), attrs={'mode': 'P'}) - assert GenericCompositor.infer_mode(arr) == 'P' + arr = xr.DataArray(np.ones((1, 5, 5)), dims=("bands", "x", "y"), attrs={"mode": "P"}) + assert GenericCompositor.infer_mode(arr) == "P" def test_band_size_is_used(self): """Test that the band size is used.""" from satpy.composites import GenericCompositor - arr = xr.DataArray(np.ones((2, 5, 5)), dims=('bands', 'x', 'y')) - assert GenericCompositor.infer_mode(arr) == 'LA' + arr = xr.DataArray(np.ones((2, 5, 5)), dims=("bands", "x", "y")) + assert GenericCompositor.infer_mode(arr) == "LA" def test_no_bands_is_l(self): """Test that default (no band) is L.""" from satpy.composites import GenericCompositor - arr = xr.DataArray(np.ones((5, 5)), dims=('x', 'y')) - assert GenericCompositor.infer_mode(arr) == 'L' + arr = xr.DataArray(np.ones((5, 5)), dims=("x", "y")) + assert GenericCompositor.infer_mode(arr) == "L" class TestLongitudeMaskingCompositor(unittest.TestCase): @@ -1772,26 +1767,26 @@ def test_masking(self): lons = np.array([-180., -100., -50., 0., 50., 100., 180.]) area.get_lonlats = mock.MagicMock(return_value=[lons, []]) a = xr.DataArray(np.array([1, 2, 3, 4, 5, 6, 7]), - attrs={'area': area, 'units': 'K'}) + attrs={"area": area, "units": "K"}) - comp = LongitudeMaskingCompositor(name='test', lon_min=-40., lon_max=120.) + comp = LongitudeMaskingCompositor(name="test", lon_min=-40., lon_max=120.) 
expected = xr.DataArray(np.array([np.nan, np.nan, np.nan, 4, 5, 6, np.nan])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) assert "units" in res.attrs assert res.attrs["units"] == "K" - comp = LongitudeMaskingCompositor(name='test', lon_min=-40.) + comp = LongitudeMaskingCompositor(name="test", lon_min=-40.) expected = xr.DataArray(np.array([np.nan, np.nan, np.nan, 4, 5, 6, 7])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) - comp = LongitudeMaskingCompositor(name='test', lon_max=120.) + comp = LongitudeMaskingCompositor(name="test", lon_max=120.) expected = xr.DataArray(np.array([1, 2, 3, 4, 5, 6, np.nan])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) - comp = LongitudeMaskingCompositor(name='test', lon_min=120., lon_max=-40.) + comp = LongitudeMaskingCompositor(name="test", lon_min=120., lon_max=-40.) expected = xr.DataArray(np.array([1, 2, 3, np.nan, np.nan, np.nan, 7])) res = comp([a]) np.testing.assert_allclose(res.data, expected.data) diff --git a/satpy/tests/test_config.py b/satpy/tests/test_config.py index 10d3205223..df33436b45 100644 --- a/satpy/tests/test_config.py +++ b/satpy/tests/test_config.py @@ -59,7 +59,7 @@ def test_areas_pyproj(self): swath_def = SwathDefinition(lons, lats) all_areas = parse_area_file(get_area_file()) for area_obj in all_areas: - if hasattr(area_obj, 'freeze'): + if hasattr(area_obj, "freeze"): try: area_obj = area_obj.freeze(lonslats=swath_def) except RuntimeError: @@ -74,7 +74,7 @@ def test_areas_rasterio(self): from rasterio.crs import CRS except ImportError: return unittest.skip("Missing rasterio dependency") - if not hasattr(CRS, 'from_dict'): + if not hasattr(CRS, "from_dict"): return unittest.skip("RasterIO 1.0+ required") import numpy as np @@ -91,7 +91,7 @@ def test_areas_rasterio(self): swath_def = SwathDefinition(lons, lats) all_areas = parse_area_file(get_area_file()) for area_obj in all_areas: - if hasattr(area_obj, 'freeze'): + if hasattr(area_obj, "freeze"): try: area_obj = area_obj.freeze(lonslats=swath_def) except RuntimeError: @@ -115,8 +115,8 @@ def fake_plugin_etc_path( etc_path, entry_points, module_paths = _get_entry_points_and_etc_paths(tmp_path, entry_point_names) fake_iter_entry_points = _create_fake_iter_entry_points(entry_points) fake_importlib_files = _create_fake_importlib_files(module_paths) - with mock.patch('satpy._config.entry_points', fake_iter_entry_points), \ - mock.patch('satpy._config.impr_files', fake_importlib_files): + with mock.patch("satpy._config.entry_points", fake_iter_entry_points), \ + mock.patch("satpy._config.impr_files", fake_importlib_files): yield etc_path @@ -154,7 +154,7 @@ def _fake_importlib_files(module_name: str) -> Path: return _fake_importlib_files -@pytest.fixture +@pytest.fixture() def fake_composite_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake compositor YAML configuration file.""" yield from _create_yamlbased_plugin( @@ -182,7 +182,7 @@ def _write_fake_composite_yaml(yaml_filename: str) -> None: """) -@pytest.fixture +@pytest.fixture() def fake_reader_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake reader YAML configuration file.""" yield from _create_yamlbased_plugin( @@ -205,7 +205,7 @@ def _write_fake_reader_yaml(yaml_filename: str) -> None: """) -@pytest.fixture +@pytest.fixture() def fake_writer_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake writer YAML 
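# --- Illustrative aside (not part of the patch): across these test modules the diff
# also rewrites unittest-style assertions (``assertEqual``, ``assertIn``,
# ``assertLess``, ...) into plain ``assert`` statements. A minimal before/after
# sketch of that rewrite with made-up attribute values; the test name is hypothetical.
import numpy as np
import xarray as xr

def test_attribute_assertions():
    res = xr.DataArray(np.ones((2, 2)), dims=("y", "x"),
                       attrs={"sensor": "foo", "units": "K"})
    # unittest idiom removed by the diff:
    #     self.assertEqual(res.attrs.get("sensor"), "foo")
    #     self.assertIn("units", res.attrs)
    # plain-assert idiom introduced by the diff:
    assert res.attrs.get("sensor") == "foo"
    assert "units" in res.attrs
# --- end of aside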
configuration file.""" yield from _create_yamlbased_plugin( @@ -226,7 +226,7 @@ def _write_fake_writer_yaml(yaml_filename: str) -> None: """) -@pytest.fixture +@pytest.fixture() def fake_enh_plugin_etc_path(tmp_path: Path) -> Iterator[Path]: """Create a fake plugin entry point with a fake enhancement YAML configure files. @@ -298,7 +298,7 @@ def test_get_plugin_configs(self, fake_composite_plugin_etc_path): from satpy._config import get_entry_points_config_dirs with satpy.config.set(config_path=[]): - dirs = get_entry_points_config_dirs('satpy.composites') + dirs = get_entry_points_config_dirs("satpy.composites") assert dirs == [str(fake_composite_plugin_etc_path)] def test_load_entry_point_composite(self, fake_composite_plugin_etc_path): @@ -393,16 +393,16 @@ def test_custom_config_file(self): import satpy my_config_dict = { - 'cache_dir': "/path/to/cache", + "cache_dir": "/path/to/cache", } try: - with tempfile.NamedTemporaryFile(mode='w+t', suffix='.yaml', delete=False) as tfile: + with tempfile.NamedTemporaryFile(mode="w+t", suffix=".yaml", delete=False) as tfile: yaml.dump(my_config_dict, tfile) tfile.close() - with mock.patch.dict('os.environ', {'SATPY_CONFIG': tfile.name}): + with mock.patch.dict("os.environ", {"SATPY_CONFIG": tfile.name}): reload(satpy._config) reload(satpy) - assert satpy.config.get('cache_dir') == '/path/to/cache' + assert satpy.config.get("cache_dir") == "/path/to/cache" finally: os.remove(tfile.name) @@ -412,15 +412,15 @@ def test_deprecated_env_vars(self): import satpy old_vars = { - 'PPP_CONFIG_DIR': '/my/ppp/config/dir', - 'SATPY_ANCPATH': '/my/ancpath', + "PPP_CONFIG_DIR": "/my/ppp/config/dir", + "SATPY_ANCPATH": "/my/ancpath", } - with mock.patch.dict('os.environ', old_vars): + with mock.patch.dict("os.environ", old_vars): reload(satpy._config) reload(satpy) - assert satpy.config.get('data_dir') == '/my/ancpath' - assert satpy.config.get('config_path') == ['/my/ppp/config/dir'] + assert satpy.config.get("data_dir") == "/my/ancpath" + assert satpy.config.get("config_path") == ["/my/ppp/config/dir"] def test_config_path_multiple(self): """Test that multiple config paths are accepted.""" @@ -429,13 +429,13 @@ def test_config_path_multiple(self): import satpy exp_paths, env_paths = _os_specific_multipaths() old_vars = { - 'SATPY_CONFIG_PATH': env_paths, + "SATPY_CONFIG_PATH": env_paths, } - with mock.patch.dict('os.environ', old_vars): + with mock.patch.dict("os.environ", old_vars): reload(satpy._config) reload(satpy) - assert satpy.config.get('config_path') == exp_paths + assert satpy.config.get("config_path") == exp_paths def test_config_path_multiple_load(self): """Test that config paths from subprocesses load properly. @@ -449,10 +449,10 @@ def test_config_path_multiple_load(self): import satpy exp_paths, env_paths = _os_specific_multipaths() old_vars = { - 'SATPY_CONFIG_PATH': env_paths, + "SATPY_CONFIG_PATH": env_paths, } - with mock.patch.dict('os.environ', old_vars): + with mock.patch.dict("os.environ", old_vars): # these reloads will update env variable "SATPY_CONFIG_PATH" reload(satpy._config) reload(satpy) @@ -460,7 +460,7 @@ def test_config_path_multiple_load(self): # load the updated env variable and parse it again. 
reload(satpy._config) reload(satpy) - assert satpy.config.get('config_path') == exp_paths + assert satpy.config.get("config_path") == exp_paths def test_bad_str_config_path(self): """Test that a str config path isn't allowed.""" @@ -468,18 +468,20 @@ def test_bad_str_config_path(self): import satpy old_vars = { - 'SATPY_CONFIG_PATH': '/my/configs1', + "SATPY_CONFIG_PATH": "/my/configs1", } # single path from env var still works - with mock.patch.dict('os.environ', old_vars): + with mock.patch.dict("os.environ", old_vars): reload(satpy._config) reload(satpy) - assert satpy.config.get('config_path') == ['/my/configs1'] + assert satpy.config.get("config_path") == ["/my/configs1"] # strings are not allowed, lists are - with satpy.config.set(config_path='/single/string/paths/are/bad'): - pytest.raises(ValueError, satpy._config.get_config_path_safe) + with satpy.config.set(config_path="/single/string/paths/are/bad"): + with pytest.raises(ValueError, + match="Satpy config option 'config_path' must be a list, not ''"): + satpy._config.get_config_path_safe() def test_tmp_dir_is_writable(self): """Check that the default temporary directory is writable.""" @@ -503,7 +505,7 @@ def _is_writable(directory): def _os_specific_multipaths(): - exp_paths = ['/my/configs1', '/my/configs2', '/my/configs3'] + exp_paths = ["/my/configs1", "/my/configs2", "/my/configs3"] if sys.platform.startswith("win"): exp_paths = ["C:" + p for p in exp_paths] path_str = os.pathsep.join(exp_paths) diff --git a/satpy/tests/test_crefl_utils.py b/satpy/tests/test_crefl_utils.py index 1e5da8cd9a..57eb4f84a6 100644 --- a/satpy/tests/test_crefl_utils.py +++ b/satpy/tests/test_crefl_utils.py @@ -33,7 +33,7 @@ def test_get_atm_variables_abi(self): 0.0043149700000000004, 0.0037296, 0.014107995000000002, 0.052349, ) sphalb, rhoray, TtotraytH2O, tOG = atm_vars() - self.assertLess(abs(np.array(sphalb) - 0.045213532544630494), 1e-10) - self.assertLess(abs(rhoray - 2.2030281148621356), 1e-10) - self.assertLess(abs(TtotraytH2O - 0.30309880915889087), 1e-10) - self.assertLess(abs(tOG - 0.5969089524560548), 1e-10) + assert abs(np.array(sphalb) - 0.045213532544630494) < 1e-10 + assert abs(rhoray - 2.2030281148621356) < 1e-10 + assert abs(TtotraytH2O - 0.30309880915889087) < 1e-10 + assert abs(tOG - 0.5969089524560548) < 1e-10 diff --git a/satpy/tests/test_data_download.py b/satpy/tests/test_data_download.py index 8f2984bd9b..78edf180af 100644 --- a/satpy/tests/test_data_download.py +++ b/satpy/tests/test_data_download.py @@ -42,16 +42,16 @@ def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwar if not prerequisites or len(prerequisites) != 1: raise ValueError("Unexpected number of prereqs") super().__init__(name, prerequisites, optional_prerequisites, **kwargs) - self.register_data_files({'url': kwargs['url'], - 'filename': kwargs['filename'], - 'known_hash': kwargs['known_hash']}) + self.register_data_files({"url": kwargs["url"], + "filename": kwargs["filename"], + "known_hash": kwargs["known_hash"]}) def _setup_custom_composite_config(base_dir): from satpy.composites import StaticImageCompositor from satpy.modifiers.atmosphere import ReflectanceCorrector composite_config = base_dir.mkdir("composites").join("visir.yaml") - with open(composite_config, 'w') as comp_file: + with open(composite_config, "w") as comp_file: yaml.dump({ "sensor_name": "visir", "modifiers": { @@ -79,7 +79,7 @@ def _setup_custom_composite_config(base_dir): def _setup_custom_reader_config(base_dir): reader_config = 
base_dir.mkdir("readers").join("fake.yaml") - with open(reader_config, 'wt') as comp_file: + with open(reader_config, "wt") as comp_file: # abstract base classes can't be converted so we do raw string comp_file.write(""" reader: @@ -97,7 +97,7 @@ def _setup_custom_reader_config(base_dir): def _setup_custom_writer_config(base_dir): writer_config = base_dir.mkdir("writers").join("fake.yaml") - with open(writer_config, 'wt') as comp_file: + with open(writer_config, "wt") as comp_file: # abstract base classes can't be converted so we do raw string comp_file.write(""" writer: @@ -113,8 +113,8 @@ def _setup_custom_writer_config(base_dir): def _assert_reader_files_downloaded(readers, found_files): - r_cond1 = 'readers/README.rst' in found_files - r_cond2 = 'readers/README2.rst' in found_files + r_cond1 = "readers/README.rst" in found_files + r_cond2 = "readers/README2.rst" in found_files if readers is not None and not readers: r_cond1 = not r_cond1 r_cond2 = not r_cond2 @@ -123,8 +123,8 @@ def _assert_reader_files_downloaded(readers, found_files): def _assert_writer_files_downloaded(writers, found_files): - w_cond1 = 'writers/README.rst' in found_files - w_cond2 = 'writers/README2.rst' in found_files + w_cond1 = "writers/README.rst" in found_files + w_cond2 = "writers/README2.rst" in found_files if writers is not None and not writers: w_cond1 = not w_cond1 w_cond2 = not w_cond2 @@ -133,15 +133,15 @@ def _assert_writer_files_downloaded(writers, found_files): def _assert_comp_files_downloaded(comp_sensors, found_files): - comp_cond = 'composites/README.rst' in found_files + comp_cond = "composites/README.rst" in found_files if comp_sensors is not None and not comp_sensors: comp_cond = not comp_cond assert comp_cond def _assert_mod_files_downloaded(comp_sensors, found_files): - mod_cond = 'modifiers/README.rst' in found_files - unfriendly_cond = 'modifiers/unfriendly.rst' in found_files + mod_cond = "modifiers/README.rst" in found_files + unfriendly_cond = "modifiers/unfriendly.rst" in found_files if comp_sensors is not None and not comp_sensors: mod_cond = not mod_cond assert mod_cond @@ -158,15 +158,15 @@ def _setup_custom_configs(self, tmpdir): _setup_custom_writer_config(tmpdir) self.tmpdir = tmpdir - @pytest.mark.parametrize('comp_sensors', [[], None, ['visir']]) - @pytest.mark.parametrize('writers', [[], None, ['fake']]) - @pytest.mark.parametrize('readers', [[], None, ['fake']]) + @pytest.mark.parametrize("comp_sensors", [tuple(), None, ("visir",)]) + @pytest.mark.parametrize("writers", [[], None, ["fake"]]) + @pytest.mark.parametrize("readers", [[], None, ["fake"]]) def test_find_registerable(self, readers, writers, comp_sensors): """Test that find_registerable finds some things.""" import satpy from satpy.aux_download import find_registerable_files with satpy.config.set(config_path=[self.tmpdir]), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', {}): + mock.patch("satpy.aux_download._FILE_REGISTRY", {}): found_files = find_registerable_files( readers=readers, writers=writers, composite_sensors=comp_sensors, @@ -183,7 +183,7 @@ def test_limited_find_registerable(self): from satpy.aux_download import find_registerable_files file_registry = {} with satpy.config.set(config_path=[self.tmpdir]), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): found_files = find_registerable_files( readers=[], writers=[], composite_sensors=[], ) @@ -195,8 +195,8 @@ def test_retrieve(self): from satpy.aux_download import 
find_registerable_files, retrieve file_registry = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir)), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): - comp_file = 'composites/README.rst' + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): + comp_file = "composites/README.rst" found_files = find_registerable_files() assert comp_file in found_files assert not self.tmpdir.join(comp_file).exists() @@ -209,8 +209,8 @@ def test_offline_retrieve(self): from satpy.aux_download import find_registerable_files, retrieve file_registry = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir), download_aux=True), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): - comp_file = 'composites/README.rst' + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): + comp_file = "composites/README.rst" found_files = find_registerable_files() assert comp_file in found_files @@ -242,10 +242,10 @@ def test_retrieve_all(self): file_registry = {} file_urls = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir)), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry), \ - mock.patch('satpy.aux_download._FILE_URLS', file_urls), \ - mock.patch('satpy.aux_download.find_registerable_files'): - comp_file = 'composites/README.rst' + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry), \ + mock.patch("satpy.aux_download._FILE_URLS", file_urls), \ + mock.patch("satpy.aux_download.find_registerable_files"): + comp_file = "composites/README.rst" file_registry[comp_file] = None file_urls[comp_file] = README_URL assert not self.tmpdir.join(comp_file).exists() @@ -260,13 +260,13 @@ def test_no_downloads_in_tests(self): file_registry = {} with satpy.config.set(config_path=[self.tmpdir], data_dir=str(self.tmpdir), download_aux=True), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry): - cache_key = 'myfile.rst' + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry): + cache_key = "myfile.rst" register_file(README_URL, cache_key) assert not self.tmpdir.join(cache_key).exists() pytest.raises(RuntimeError, retrieve, cache_key) # touch the file so it gets created - open(self.tmpdir.join(cache_key), 'w').close() + open(self.tmpdir.join(cache_key), "w").close() # offline downloading should still be allowed with satpy.config.set(download_aux=False): retrieve(cache_key) @@ -278,10 +278,10 @@ def test_download_script(self): file_registry = {} file_urls = {} with satpy.config.set(config_path=[self.tmpdir]), \ - mock.patch('satpy.aux_download._FILE_REGISTRY', file_registry), \ - mock.patch('satpy.aux_download._FILE_URLS', file_urls), \ - mock.patch('satpy.aux_download.find_registerable_files'): - comp_file = 'composites/README.rst' + mock.patch("satpy.aux_download._FILE_REGISTRY", file_registry), \ + mock.patch("satpy.aux_download._FILE_URLS", file_urls), \ + mock.patch("satpy.aux_download.find_registerable_files"): + comp_file = "composites/README.rst" file_registry[comp_file] = None file_urls[comp_file] = README_URL assert not self.tmpdir.join(comp_file).exists() diff --git a/satpy/tests/test_dataset.py b/satpy/tests/test_dataset.py index 0bc1de2982..014a450e0c 100644 --- a/satpy/tests/test_dataset.py +++ b/satpy/tests/test_dataset.py @@ -36,19 +36,19 @@ def test_basic_init(self): from satpy.dataset.dataid import minimal_default_keys_config as mdkc did = DataID(dikc, name="a") - assert did['name'] == 'a' - assert did['modifiers'] == tuple() + assert 
did["name"] == "a" + assert did["modifiers"] == tuple() DataID(dikc, name="a", wavelength=0.86) DataID(dikc, name="a", resolution=1000) - DataID(dikc, name="a", calibration='radiance') + DataID(dikc, name="a", calibration="radiance") DataID(dikc, name="a", wavelength=0.86, resolution=250, - calibration='radiance') + calibration="radiance") DataID(dikc, name="a", wavelength=0.86, resolution=250, - calibration='radiance', modifiers=('sunz_corrected',)) - with pytest.raises(ValueError): + calibration="radiance", modifiers=("sunz_corrected",)) + with pytest.raises(ValueError, match="Required field name missing."): DataID(dikc, wavelength=0.86) - did = DataID(mdkc, name='comp24', resolution=500) - assert did['resolution'] == 500 + did = DataID(mdkc, name="comp24", resolution=500) + assert did["resolution"] == 500 def test_init_bad_modifiers(self): """Test that modifiers are a tuple.""" @@ -64,21 +64,21 @@ def test_compare_no_wl(self): d2 = DataID(dikc, name="a", wavelength=None) # this happens when sorting IDs during dependency checks - self.assertFalse(d1 < d2) - self.assertTrue(d2 < d1) + assert not (d1 < d2) + assert d2 < d1 def test_bad_calibration(self): """Test that asking for a bad calibration fails.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc - with pytest.raises(ValueError): - DataID(dikc, name='C05', calibration='_bad_') + with pytest.raises(ValueError, match="_bad_ invalid value for "): + DataID(dikc, name="C05", calibration="_bad_") def test_is_modified(self): """Test that modifications are detected properly.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc - d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=('hej',)) + d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=("hej",)) d2 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=tuple()) assert d1.is_modified() @@ -88,11 +88,11 @@ def test_create_less_modified_query(self): """Test that modifications are popped correctly.""" from satpy.dataset.dataid import DataID from satpy.dataset.dataid import default_id_keys_config as dikc - d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=('hej',)) + d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=("hej",)) d2 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=tuple()) - assert not d1.create_less_modified_query()['modifiers'] - assert not d2.create_less_modified_query()['modifiers'] + assert not d1.create_less_modified_query()["modifiers"] + assert not d2.create_less_modified_query()["modifiers"] class TestCombineMetadata(unittest.TestCase): @@ -101,11 +101,11 @@ class TestCombineMetadata(unittest.TestCase): def setUp(self): """Set up the test case.""" self.datetime_dts = ( - {'start_time': datetime(2018, 2, 1, 11, 58, 0)}, - {'start_time': datetime(2018, 2, 1, 11, 59, 0)}, - {'start_time': datetime(2018, 2, 1, 12, 0, 0)}, - {'start_time': datetime(2018, 2, 1, 12, 1, 0)}, - {'start_time': datetime(2018, 2, 1, 12, 2, 0)}, + {"start_time": datetime(2018, 2, 1, 11, 58, 0)}, + {"start_time": datetime(2018, 2, 1, 11, 59, 0)}, + {"start_time": datetime(2018, 2, 1, 12, 0, 0)}, + {"start_time": datetime(2018, 2, 1, 12, 1, 0)}, + {"start_time": datetime(2018, 2, 1, 12, 2, 0)}, ) def test_average_datetimes(self): @@ -119,20 +119,20 @@ def test_average_datetimes(self): datetime(2018, 2, 1, 12, 2, 0), ) ret = average_datetimes(dts) - self.assertEqual(dts[2], ret) + assert dts[2] == ret def 
test_combine_times_with_averaging(self): """Test the combine_metadata with times with averaging.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.datetime_dts) - self.assertEqual(self.datetime_dts[2]['start_time'], ret['start_time']) + assert self.datetime_dts[2]["start_time"] == ret["start_time"] def test_combine_times_without_averaging(self): """Test the combine_metadata with times without averaging.""" from satpy.dataset.metadata import combine_metadata ret = combine_metadata(*self.datetime_dts, average_times=False) # times are not equal so don't include it in the final result - self.assertNotIn('start_time', ret) + assert "start_time" not in ret def test_combine_arrays(self): """Test the combine_metadata with arrays.""" @@ -175,44 +175,44 @@ def test_combine_lists_identical(self): """Test combine metadata with identical lists.""" from satpy.dataset.metadata import combine_metadata metadatas = [ - {'prerequisites': [1, 2, 3, 4]}, - {'prerequisites': [1, 2, 3, 4]}, + {"prerequisites": [1, 2, 3, 4]}, + {"prerequisites": [1, 2, 3, 4]}, ] res = combine_metadata(*metadatas) - assert res['prerequisites'] == [1, 2, 3, 4] + assert res["prerequisites"] == [1, 2, 3, 4] def test_combine_lists_same_size_diff_values(self): """Test combine metadata with lists with different values.""" from satpy.dataset.metadata import combine_metadata metadatas = [ - {'prerequisites': [1, 2, 3, 4]}, - {'prerequisites': [1, 2, 3, 5]}, + {"prerequisites": [1, 2, 3, 4]}, + {"prerequisites": [1, 2, 3, 5]}, ] res = combine_metadata(*metadatas) - assert 'prerequisites' not in res + assert "prerequisites" not in res def test_combine_lists_different_size(self): """Test combine metadata with different size lists.""" from satpy.dataset.metadata import combine_metadata metadatas = [ - {'prerequisites': [1, 2, 3, 4]}, - {'prerequisites': []}, + {"prerequisites": [1, 2, 3, 4]}, + {"prerequisites": []}, ] res = combine_metadata(*metadatas) - assert 'prerequisites' not in res + assert "prerequisites" not in res metadatas = [ - {'prerequisites': [1, 2, 3, 4]}, - {'prerequisites': [1, 2, 3]}, + {"prerequisites": [1, 2, 3, 4]}, + {"prerequisites": [1, 2, 3]}, ] res = combine_metadata(*metadatas) - assert 'prerequisites' not in res + assert "prerequisites" not in res def test_combine_identical_numpy_scalars(self): """Test combining identical fill values.""" from satpy.dataset.metadata import combine_metadata - test_metadata = [{'_FillValue': np.uint16(42)}, {'_FillValue': np.uint16(42)}] - assert combine_metadata(*test_metadata) == {'_FillValue': 42} + test_metadata = [{"_FillValue": np.uint16(42)}, {"_FillValue": np.uint16(42)}] + assert combine_metadata(*test_metadata) == {"_FillValue": 42} def test_combine_empty_metadata(self): """Test combining empty metadata.""" @@ -223,96 +223,96 @@ def test_combine_empty_metadata(self): def test_combine_nans(self): """Test combining nan fill values.""" from satpy.dataset.metadata import combine_metadata - test_metadata = [{'_FillValue': np.nan}, {'_FillValue': np.nan}] - assert combine_metadata(*test_metadata) == {'_FillValue': np.nan} + test_metadata = [{"_FillValue": np.nan}, {"_FillValue": np.nan}] + assert combine_metadata(*test_metadata) == {"_FillValue": np.nan} def test_combine_numpy_arrays(self): """Test combining values that are numpy arrays.""" from satpy.dataset.metadata import combine_metadata - test_metadata = [{'valid_range': np.array([0., 0.00032], dtype=np.float32)}, - {'valid_range': np.array([0., 0.00032], dtype=np.float32)}, - 
{'valid_range': np.array([0., 0.00032], dtype=np.float32)}] + test_metadata = [{"valid_range": np.array([0., 0.00032], dtype=np.float32)}, + {"valid_range": np.array([0., 0.00032], dtype=np.float32)}, + {"valid_range": np.array([0., 0.00032], dtype=np.float32)}] result = combine_metadata(*test_metadata) - assert np.allclose(result['valid_range'], np.array([0., 0.00032], dtype=np.float32)) + assert np.allclose(result["valid_range"], np.array([0., 0.00032], dtype=np.float32)) def test_combine_dask_arrays(self): """Test combining values that are dask arrays.""" import dask.array as da from satpy.dataset.metadata import combine_metadata - test_metadata = [{'valid_range': da.from_array(np.array([0., 0.00032], dtype=np.float32))}, - {'valid_range': da.from_array(np.array([0., 0.00032], dtype=np.float32))}] + test_metadata = [{"valid_range": da.from_array(np.array([0., 0.00032], dtype=np.float32))}, + {"valid_range": da.from_array(np.array([0., 0.00032], dtype=np.float32))}] result = combine_metadata(*test_metadata) - assert 'valid_range' not in result + assert "valid_range" not in result def test_combine_real_world_mda(self): """Test with real data.""" - mda_objects = ({'_FillValue': np.nan, - 'valid_range': np.array([0., 0.00032], dtype=np.float32), - 'ancillary_variables': ['cpp_status_flag', - 'cpp_conditions', - 'cpp_quality', - 'cpp_reff_pal', - '-'], - 'platform_name': 'NOAA-20', - 'sensor': {'viirs'}, - 'raw_metadata': {'foo': {'bar': np.array([1, 2, 3])}}}, - {'_FillValue': np.nan, - 'valid_range': np.array([0., 0.00032], dtype=np.float32), - 'ancillary_variables': ['cpp_status_flag', - 'cpp_conditions', - 'cpp_quality', - 'cpp_reff_pal', - '-'], - 'platform_name': 'NOAA-20', - 'sensor': {'viirs'}, - 'raw_metadata': {'foo': {'bar': np.array([1, 2, 3])}}}) - - expected = {'_FillValue': np.nan, - 'valid_range': np.array([0., 0.00032], dtype=np.float32), - 'ancillary_variables': ['cpp_status_flag', - 'cpp_conditions', - 'cpp_quality', - 'cpp_reff_pal', - '-'], - 'platform_name': 'NOAA-20', - 'sensor': {'viirs'}, - 'raw_metadata': {'foo': {'bar': np.array([1, 2, 3])}}} + mda_objects = ({"_FillValue": np.nan, + "valid_range": np.array([0., 0.00032], dtype=np.float32), + "ancillary_variables": ["cpp_status_flag", + "cpp_conditions", + "cpp_quality", + "cpp_reff_pal", + "-"], + "platform_name": "NOAA-20", + "sensor": {"viirs"}, + "raw_metadata": {"foo": {"bar": np.array([1, 2, 3])}}}, + {"_FillValue": np.nan, + "valid_range": np.array([0., 0.00032], dtype=np.float32), + "ancillary_variables": ["cpp_status_flag", + "cpp_conditions", + "cpp_quality", + "cpp_reff_pal", + "-"], + "platform_name": "NOAA-20", + "sensor": {"viirs"}, + "raw_metadata": {"foo": {"bar": np.array([1, 2, 3])}}}) + + expected = {"_FillValue": np.nan, + "valid_range": np.array([0., 0.00032], dtype=np.float32), + "ancillary_variables": ["cpp_status_flag", + "cpp_conditions", + "cpp_quality", + "cpp_reff_pal", + "-"], + "platform_name": "NOAA-20", + "sensor": {"viirs"}, + "raw_metadata": {"foo": {"bar": np.array([1, 2, 3])}}} from satpy.dataset.metadata import combine_metadata result = combine_metadata(*mda_objects) - assert np.allclose(result.pop('_FillValue'), expected.pop('_FillValue'), equal_nan=True) - assert np.allclose(result.pop('valid_range'), expected.pop('valid_range')) - np.testing.assert_equal(result.pop('raw_metadata'), - expected.pop('raw_metadata')) + assert np.allclose(result.pop("_FillValue"), expected.pop("_FillValue"), equal_nan=True) + assert np.allclose(result.pop("valid_range"), 
expected.pop("valid_range")) + np.testing.assert_equal(result.pop("raw_metadata"), + expected.pop("raw_metadata")) assert result == expected def test_combine_one_metadata_object(self): """Test combining one metadata object.""" - mda_objects = ({'_FillValue': np.nan, - 'valid_range': np.array([0., 0.00032], dtype=np.float32), - 'ancillary_variables': ['cpp_status_flag', - 'cpp_conditions', - 'cpp_quality', - 'cpp_reff_pal', - '-'], - 'platform_name': 'NOAA-20', - 'sensor': {'viirs'}},) - - expected = {'_FillValue': np.nan, - 'valid_range': np.array([0., 0.00032], dtype=np.float32), - 'ancillary_variables': ['cpp_status_flag', - 'cpp_conditions', - 'cpp_quality', - 'cpp_reff_pal', - '-'], - 'platform_name': 'NOAA-20', - 'sensor': {'viirs'}} + mda_objects = ({"_FillValue": np.nan, + "valid_range": np.array([0., 0.00032], dtype=np.float32), + "ancillary_variables": ["cpp_status_flag", + "cpp_conditions", + "cpp_quality", + "cpp_reff_pal", + "-"], + "platform_name": "NOAA-20", + "sensor": {"viirs"}},) + + expected = {"_FillValue": np.nan, + "valid_range": np.array([0., 0.00032], dtype=np.float32), + "ancillary_variables": ["cpp_status_flag", + "cpp_conditions", + "cpp_quality", + "cpp_reff_pal", + "-"], + "platform_name": "NOAA-20", + "sensor": {"viirs"}} from satpy.dataset.metadata import combine_metadata result = combine_metadata(*mda_objects) - assert np.allclose(result.pop('_FillValue'), expected.pop('_FillValue'), equal_nan=True) - assert np.allclose(result.pop('valid_range'), expected.pop('valid_range')) + assert np.allclose(result.pop("_FillValue"), expected.pop("_FillValue"), equal_nan=True) + assert np.allclose(result.pop("valid_range"), expected.pop("valid_range")) assert result == expected @@ -320,29 +320,29 @@ def test_combine_dicts_close(): """Test combination of dictionaries whose values are close.""" from satpy.dataset.metadata import combine_metadata attrs = { - 'raw_metadata': { - 'a': 1, - 'b': 'foo', - 'c': [1, 2, 3], - 'd': { - 'e': np.str_('bar'), - 'f': datetime(2020, 1, 1, 12, 15, 30), - 'g': np.array([1, 2, 3]), + "raw_metadata": { + "a": 1, + "b": "foo", + "c": [1, 2, 3], + "d": { + "e": np.str_("bar"), + "f": datetime(2020, 1, 1, 12, 15, 30), + "g": np.array([1, 2, 3]), }, - 'h': np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) + "h": np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) } } attrs_close = { - 'raw_metadata': { - 'a': 1 + 1E-12, - 'b': 'foo', - 'c': np.array([1, 2, 3]) + 1E-12, - 'd': { - 'e': np.str_('bar'), - 'f': datetime(2020, 1, 1, 12, 15, 30), - 'g': np.array([1, 2, 3]) + 1E-12 + "raw_metadata": { + "a": 1 + 1E-12, + "b": "foo", + "c": np.array([1, 2, 3]) + 1E-12, + "d": { + "e": np.str_("bar"), + "f": datetime(2020, 1, 1, 12, 15, 30), + "g": np.array([1, 2, 3]) + 1E-12 }, - 'h': np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) + "h": np.array([datetime(2020, 1, 1), datetime(2020, 1, 1)]) } } test_metadata = [attrs, attrs_close] @@ -354,22 +354,22 @@ def test_combine_dicts_close(): "test_mda", [ # a/b/c/d different - {'a': np.array([1, 2, 3]), 'd': 123}, - {'a': {'b': np.array([4, 5, 6]), 'c': 1.0}, 'd': 'foo'}, - {'a': {'b': np.array([1, 2, 3]), 'c': 2.0}, 'd': 'foo'}, - {'a': {'b': np.array([1, 2, 3]), 'c': 1.0}, 'd': 'bar'}, + {"a": np.array([1, 2, 3]), "d": 123}, + {"a": {"b": np.array([4, 5, 6]), "c": 1.0}, "d": "foo"}, + {"a": {"b": np.array([1, 2, 3]), "c": 2.0}, "d": "foo"}, + {"a": {"b": np.array([1, 2, 3]), "c": 1.0}, "d": "bar"}, # a/b/c/d type different np.array([1, 2, 3]), - {'a': {'b': 'baz', 'c': 1.0}, 'd': 'foo'}, - 
{'a': {'b': np.array([1, 2, 3]), 'c': 'baz'}, 'd': 'foo'}, - {'a': {'b': np.array([1, 2, 3]), 'c': 1.0}, 'd': 1.0} + {"a": {"b": "baz", "c": 1.0}, "d": "foo"}, + {"a": {"b": np.array([1, 2, 3]), "c": "baz"}, "d": "foo"}, + {"a": {"b": np.array([1, 2, 3]), "c": 1.0}, "d": 1.0} ] ) def test_combine_dicts_different(test_mda): """Test combination of dictionaries differing in various ways.""" from satpy.dataset.metadata import combine_metadata - mda = {'a': {'b': np.array([1, 2, 3]), 'c': 1.0}, 'd': 'foo'} - test_metadata = [{'raw_metadata': mda}, {'raw_metadata': test_mda}] + mda = {"a": {"b": np.array([1, 2, 3]), "c": 1.0}, "d": "foo"} + test_metadata = [{"raw_metadata": mda}, {"raw_metadata": test_mda}] result = combine_metadata(*test_metadata) assert not result @@ -380,57 +380,57 @@ def test_dataid(): # Check that enum is translated to type. did = make_dataid() - assert issubclass(did._id_keys['calibration']['type'], ValueList) - assert 'enum' not in did._id_keys['calibration'] + assert issubclass(did._id_keys["calibration"]["type"], ValueList) + assert "enum" not in did._id_keys["calibration"] # Check that None is never a valid value - did = make_dataid(name='cheese_shops', resolution=None) - assert 'resolution' not in did - assert 'None' not in did.__repr__() - with pytest.raises(ValueError): + did = make_dataid(name="cheese_shops", resolution=None) + assert "resolution" not in did + assert "None" not in did.__repr__() + with pytest.raises(ValueError, match="Required field name missing."): make_dataid(name=None, resolution=1000) # Check that defaults are applied correctly - assert did['modifiers'] == ModifierTuple() + assert did["modifiers"] == ModifierTuple() # Check that from_dict creates a distinct instance... - did2 = did.from_dict(dict(name='cheese_shops', resolution=None)) + did2 = did.from_dict(dict(name="cheese_shops", resolution=None)) assert did is not did2 # ...But is equal assert did2 == did # Check that the instance is immutable with pytest.raises(TypeError): - did['resolution'] = 1000 + did["resolution"] = 1000 # Check that a missing required field crashes - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Required field name missing."): make_dataid(resolution=1000) # Check to_dict - assert did.to_dict() == dict(name='cheese_shops', modifiers=tuple()) + assert did.to_dict() == dict(name="cheese_shops", modifiers=tuple()) # Check repr - did = make_dataid(name='VIS008', resolution=111) + did = make_dataid(name="VIS008", resolution=111) assert repr(did) == "DataID(name='VIS008', resolution=111, modifiers=())" # Check inequality - default_id_keys_config = {'name': None, - 'wavelength': { - 'type': WavelengthRange, + default_id_keys_config = {"name": None, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "resolution": None, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ] }, - 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, }, } assert DataID(default_id_keys_config, wavelength=10) != DataID(default_id_keys_config, name="VIS006") @@ -439,44 +439,44 @@ def test_dataid(): def test_dataid_equal_if_enums_different(): """Check that dataids with different enums but same items are equal.""" from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange - id_keys_config1 = {'name': None, - 
'wavelength': { - 'type': WavelengthRange, + id_keys_config1 = {"name": None, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'c1', - 'c2', - 'c3', + "resolution": None, + "calibration": { + "enum": [ + "c1", + "c2", + "c3", ] }, - 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, }, } - id_keys_config2 = {'name': None, - 'wavelength': { - 'type': WavelengthRange, + id_keys_config2 = {"name": None, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'c1', - 'c1.5', - 'c2', - 'c2.5', - 'c3' + "resolution": None, + "calibration": { + "enum": [ + "c1", + "c1.5", + "c2", + "c2.5", + "c3" ] }, - 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, }, } - assert DataID(id_keys_config1, name='ni', calibration='c2') == DataID(id_keys_config2, name="ni", calibration='c2') + assert DataID(id_keys_config1, name="ni", calibration="c2") == DataID(id_keys_config2, name="ni", calibration="c2") def test_dataid_copy(): @@ -497,7 +497,7 @@ def test_dataid_pickle(): import pickle from satpy.tests.utils import make_dataid - did = make_dataid(name='hi', wavelength=(10, 11, 12), resolution=1000, calibration='radiance') + did = make_dataid(name="hi", wavelength=(10, 11, 12), resolution=1000, calibration="radiance") assert did == pickle.loads(pickle.dumps(did)) @@ -512,7 +512,7 @@ def test_dataid_elements_picklable(): import pickle from satpy.tests.utils import make_dataid - did = make_dataid(name='hi', wavelength=(10, 11, 12), resolution=1000, calibration='radiance') + did = make_dataid(name="hi", wavelength=(10, 11, 12), resolution=1000, calibration="radiance") for value in did.values(): pickled_value = pickle.loads(pickle.dumps(value)) assert value == pickled_value @@ -525,10 +525,10 @@ def test_dataquery(self): """Test DataQuery objects.""" from satpy.dataset import DataQuery - DataQuery(name='cheese_shops') + DataQuery(name="cheese_shops") # Check repr - did = DataQuery(name='VIS008', resolution=111) + did = DataQuery(name="VIS008", resolution=111) assert repr(did) == "DataQuery(name='VIS008', resolution=111)" # Check inequality @@ -537,7 +537,7 @@ def test_dataquery(self): def test_is_modified(self): """Test that modifications are detected properly.""" from satpy.dataset import DataQuery - d1 = DataQuery(name="a", wavelength=0.2, modifiers=('hej',)) + d1 = DataQuery(name="a", wavelength=0.2, modifiers=("hej",)) d2 = DataQuery(name="a", wavelength=0.2, modifiers=tuple()) assert d1.is_modified() @@ -546,11 +546,11 @@ def test_is_modified(self): def test_create_less_modified_query(self): """Test that modifications are popped correctly.""" from satpy.dataset import DataQuery - d1 = DataQuery(name="a", wavelength=0.2, modifiers=('hej',)) + d1 = DataQuery(name="a", wavelength=0.2, modifiers=("hej",)) d2 = DataQuery(name="a", wavelength=0.2, modifiers=tuple()) - assert not d1.create_less_modified_query()['modifiers'] - assert not d2.create_less_modified_query()['modifiers'] + assert not d1.create_less_modified_query()["modifiers"] + assert not d2.create_less_modified_query()["modifiers"] class TestIDQueryInteractions(unittest.TestCase): @@ -559,56 +559,56 @@ class TestIDQueryInteractions(unittest.TestCase): def setUp(self) -> None: """Set up the test case.""" self.default_id_keys_config = { - 'name': { - 'required': True, + "name": 
{ + "required": True, }, - 'wavelength': { - 'type': WavelengthRange, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "resolution": None, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ] }, - 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + "modifiers": { + "default": ModifierTuple(), + "type": ModifierTuple, }, } def test_hash_equality(self): """Test hash equality.""" - dq = DataQuery(modifiers=tuple(), name='cheese_shops') - did = DataID(self.default_id_keys_config, name='cheese_shops') + dq = DataQuery(modifiers=tuple(), name="cheese_shops") + did = DataID(self.default_id_keys_config, name="cheese_shops") assert hash(dq) == hash(did) def test_id_filtering(self): """Check did filtering.""" - dq = DataQuery(modifiers=tuple(), name='cheese_shops') - did = DataID(self.default_id_keys_config, name='cheese_shops') - did2 = DataID(self.default_id_keys_config, name='ni') + dq = DataQuery(modifiers=tuple(), name="cheese_shops") + did = DataID(self.default_id_keys_config, name="cheese_shops") + did2 = DataID(self.default_id_keys_config, name="ni") res = dq.filter_dataids([did2, did]) assert len(res) == 1 assert res[0] == did dataid_container = [DataID(self.default_id_keys_config, - name='ds1', + name="ds1", resolution=250, - calibration='reflectance', + calibration="reflectance", modifiers=tuple())] dq = DataQuery(wavelength=0.22, modifiers=tuple()) assert len(dq.filter_dataids(dataid_container)) == 0 dataid_container = [DataID(minimal_default_keys_config, - name='natural_color')] - dq = DataQuery(name='natural_color', resolution=250) + name="natural_color")] + dq = DataQuery(name="natural_color", resolution=250) assert len(dq.filter_dataids(dataid_container)) == 1 - dq = make_dsq(wavelength=0.22, modifiers=('mod1',)) - did = make_cid(name='static_image') + dq = make_dsq(wavelength=0.22, modifiers=("mod1",)) + did = make_cid(name="static_image") assert len(dq.filter_dataids([did])) == 0 def test_inequality(self): @@ -617,70 +617,70 @@ def test_inequality(self): def test_sort_dataids(self): """Check dataid sorting.""" - dq = DataQuery(name='cheese_shops', wavelength=2, modifiers='*') - did = DataID(self.default_id_keys_config, name='cheese_shops', wavelength=(1, 2, 3)) - did2 = DataID(self.default_id_keys_config, name='cheese_shops', wavelength=(1.1, 2.1, 3.1)) + dq = DataQuery(name="cheese_shops", wavelength=2, modifiers="*") + did = DataID(self.default_id_keys_config, name="cheese_shops", wavelength=(1, 2, 3)) + did2 = DataID(self.default_id_keys_config, name="cheese_shops", wavelength=(1.1, 2.1, 3.1)) dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did, did2] assert np.allclose(distances, [0, 0.1]) - dq = DataQuery(name='cheese_shops') - did = DataID(self.default_id_keys_config, name='cheese_shops', resolution=200) - did2 = DataID(self.default_id_keys_config, name='cheese_shops', resolution=400) + dq = DataQuery(name="cheese_shops") + did = DataID(self.default_id_keys_config, name="cheese_shops", resolution=200) + did2 = DataID(self.default_id_keys_config, name="cheese_shops", resolution=400) dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did, did2] assert distances[0] < distances[1] - did = DataID(self.default_id_keys_config, name='cheese_shops', calibration='counts') - did2 = DataID(self.default_id_keys_config, name='cheese_shops', 
calibration='reflectance') + did = DataID(self.default_id_keys_config, name="cheese_shops", calibration="counts") + did2 = DataID(self.default_id_keys_config, name="cheese_shops", calibration="reflectance") dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did2, did] assert distances[0] < distances[1] - did = DataID(self.default_id_keys_config, name='cheese_shops', modifiers=tuple()) - did2 = DataID(self.default_id_keys_config, name='cheese_shops', modifiers=tuple(['out_of_stock'])) + did = DataID(self.default_id_keys_config, name="cheese_shops", modifiers=tuple()) + did2 = DataID(self.default_id_keys_config, name="cheese_shops", modifiers=tuple(["out_of_stock"])) dsids, distances = dq.sort_dataids([did2, did]) assert list(dsids) == [did, did2] assert distances[0] < distances[1] def test_sort_dataids_with_different_set_of_keys(self): """Check sorting data ids when the query has a different set of keys.""" - dq = DataQuery(name='solar_zenith_angle', calibration='reflectance') - dids = [DataID(self.default_id_keys_config, name='solar_zenith_angle', resolution=1000, modifiers=()), - DataID(self.default_id_keys_config, name='solar_zenith_angle', resolution=500, modifiers=()), - DataID(self.default_id_keys_config, name='solar_zenith_angle', resolution=250, modifiers=())] + dq = DataQuery(name="solar_zenith_angle", calibration="reflectance") + dids = [DataID(self.default_id_keys_config, name="solar_zenith_angle", resolution=1000, modifiers=()), + DataID(self.default_id_keys_config, name="solar_zenith_angle", resolution=500, modifiers=()), + DataID(self.default_id_keys_config, name="solar_zenith_angle", resolution=250, modifiers=())] dsids, distances = dq.sort_dataids(dids) assert distances[0] < distances[1] assert distances[1] < distances[2] def test_seviri_hrv_has_priority_over_vis008(self): """Check that the HRV channel has priority over VIS008 when querying 0.8µm.""" - dids = [DataID(self.default_id_keys_config, name='HRV', - wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit='µm'), resolution=1000.134348869, + dids = [DataID(self.default_id_keys_config, name="HRV", + wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit="µm"), resolution=1000.134348869, calibration="reflectance", modifiers=()), - DataID(self.default_id_keys_config, name='HRV', - wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit='µm'), resolution=1000.134348869, + DataID(self.default_id_keys_config, name="HRV", + wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit="µm"), resolution=1000.134348869, calibration="radiance", modifiers=()), - DataID(self.default_id_keys_config, name='HRV', - wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit='µm'), resolution=1000.134348869, + DataID(self.default_id_keys_config, name="HRV", + wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit="µm"), resolution=1000.134348869, calibration="counts", modifiers=()), - DataID(self.default_id_keys_config, name='VIS006', - wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit='µm'), + DataID(self.default_id_keys_config, name="VIS006", + wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit="µm"), resolution=3000.403165817, calibration="reflectance", modifiers=()), - DataID(self.default_id_keys_config, name='VIS006', - wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit='µm'), + DataID(self.default_id_keys_config, name="VIS006", + wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit="µm"), 
resolution=3000.403165817, calibration="radiance", modifiers=()), - DataID(self.default_id_keys_config, name='VIS006', - wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit='µm'), + DataID(self.default_id_keys_config, name="VIS006", + wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit="µm"), resolution=3000.403165817, calibration="counts", modifiers=()), - DataID(self.default_id_keys_config, name='VIS008', - wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit='µm'), + DataID(self.default_id_keys_config, name="VIS008", + wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit="µm"), resolution=3000.403165817, calibration="reflectance", modifiers=()), - DataID(self.default_id_keys_config, name='VIS008', - wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit='µm'), + DataID(self.default_id_keys_config, name="VIS008", + wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit="µm"), resolution=3000.403165817, calibration="radiance", modifiers=()), - DataID(self.default_id_keys_config, name='VIS008', - wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit='µm'), + DataID(self.default_id_keys_config, name="VIS008", + wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit="µm"), resolution=3000.403165817, calibration="counts", modifiers=())] dq = DataQuery(wavelength=0.8) res, distances = dq.sort_dataids(dids) @@ -694,14 +694,14 @@ def test_frequency_quadruple_side_band_class_method_convert(): res = frq_qdsb.convert(57.37) assert res == 57.37 - res = frq_qdsb.convert({'central': 57.0, 'side': 0.322, 'sideside': 0.05, 'bandwidth': 0.036}) + res = frq_qdsb.convert({"central": 57.0, "side": 0.322, "sideside": 0.05, "bandwidth": 0.036}) assert res == FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036) def test_frequency_quadruple_side_band_channel_str(): """Test the frequency quadruple side band object: test the band description.""" frq_qdsb1 = FrequencyQuadrupleSideBand(57.0, 0.322, 0.05, 0.036) - frq_qdsb2 = FrequencyQuadrupleSideBand(57000, 322, 50, 36, 'MHz') + frq_qdsb2 = FrequencyQuadrupleSideBand(57000, 322, 50, 36, "MHz") assert str(frq_qdsb1) == "central=57.0 GHz ±0.322 ±0.05 width=0.036 GHz" assert str(frq_qdsb2) == "central=57000 MHz ±322 ±50 width=36 MHz" @@ -735,8 +735,8 @@ def test_frequency_quadruple_side_band_channel_distances(): frq_qdsb = FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036) mydist = frq_qdsb.distance([57, 0.322, 0.05, 0.036]) - frq_dict = {'central': 57, 'side': 0.322, 'sideside': 0.05, - 'bandwidth': 0.036, 'unit': 'GHz'} + frq_dict = {"central": 57, "side": 0.322, "sideside": 0.05, + "bandwidth": 0.036, "unit": "GHz"} mydist = frq_qdsb.distance(frq_dict) assert mydist == np.inf @@ -769,7 +769,7 @@ def test_frequency_quadruple_side_band_channel_containment(): frq_qdsb = None assert (frq_qdsb in FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.05)) is False - assert '57' not in FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.05) + assert "57" not in FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.05) def test_frequency_double_side_band_class_method_convert(): @@ -779,14 +779,14 @@ def test_frequency_double_side_band_class_method_convert(): res = frq_dsb.convert(185) assert res == 185 - res = frq_dsb.convert({'central': 185, 'side': 7, 'bandwidth': 2}) + res = frq_dsb.convert({"central": 185, "side": 7, "bandwidth": 2}) assert res == FrequencyDoubleSideBand(185, 7, 2) def test_frequency_double_side_band_channel_str(): """Test the frequency double side band object: test the band 
description.""" frq_dsb1 = FrequencyDoubleSideBand(183, 7, 2) - frq_dsb2 = FrequencyDoubleSideBand(183000, 7000, 2000, 'MHz') + frq_dsb2 = FrequencyDoubleSideBand(183000, 7000, 2000, "MHz") assert str(frq_dsb1) == "central=183 GHz ±7 width=2 GHz" assert str(frq_dsb2) == "central=183000 MHz ±7000 width=2000 MHz" @@ -846,12 +846,12 @@ def test_frequency_double_side_band_channel_containment(): assert frq_range not in FrequencyDoubleSideBand(183, 4, 2) with pytest.raises(NotImplementedError): - assert frq_range in FrequencyDoubleSideBand(183, 6.5, 3, 'MHz') + assert frq_range in FrequencyDoubleSideBand(183, 6.5, 3, "MHz") frq_range = None assert (frq_range in FrequencyDoubleSideBand(183, 3, 2)) is False - assert '183' not in FrequencyDoubleSideBand(183, 3, 2) + assert "183" not in FrequencyDoubleSideBand(183, 3, 2) def test_frequency_range_class_method_convert(): @@ -861,14 +861,14 @@ def test_frequency_range_class_method_convert(): res = frq_range.convert(89) assert res == 89 - res = frq_range.convert({'central': 89, 'bandwidth': 2}) + res = frq_range.convert({"central": 89, "bandwidth": 2}) assert res == FrequencyRange(89, 2) def test_frequency_range_class_method_str(): """Test the frequency range object: test the band description.""" frq_range1 = FrequencyRange(89, 2) - frq_range2 = FrequencyRange(89000, 2000, 'MHz') + frq_range2 = FrequencyRange(89000, 2000, "MHz") assert str(frq_range1) == "central=89 GHz width=2 GHz" assert str(frq_range2) == "central=89000 MHz width=2000 MHz" @@ -882,7 +882,7 @@ def test_frequency_range_channel_equality(): assert 1.2 != frqr assert frqr == (2, 1) - assert frqr == (2, 1, 'GHz') + assert frqr == (2, 1, "GHz") def test_frequency_range_channel_containment(): @@ -892,12 +892,12 @@ def test_frequency_range_channel_containment(): assert 2.8 not in frqr with pytest.raises(NotImplementedError): - assert frqr in FrequencyRange(89, 2, 'MHz') + assert frqr in FrequencyRange(89, 2, "MHz") frqr = None assert (frqr in FrequencyRange(89, 2)) is False - assert '89' not in FrequencyRange(89, 2) + assert "89" not in FrequencyRange(89, 2) def test_frequency_range_channel_distances(): @@ -920,7 +920,7 @@ def test_wavelength_range(): assert 1.2 == wr assert .9 != wr assert wr == (1, 2, 3) - assert wr == (1, 2, 3, 'µm') + assert wr == (1, 2, 3, "µm") # Check containement assert 1.2 in wr @@ -929,11 +929,11 @@ def test_wavelength_range(): assert WavelengthRange(1.1, 2.2, 3.3) not in wr assert WavelengthRange(1.2, 2, 2.8) in wr assert WavelengthRange(10, 20, 30) not in wr - assert 'bla' not in wr + assert "bla" not in wr assert None not in wr - wr2 = WavelengthRange(1, 2, 3, 'µm') + wr2 = WavelengthRange(1, 2, 3, "µm") assert wr2 in wr - wr2 = WavelengthRange(1, 2, 3, 'nm') + wr2 = WavelengthRange(1, 2, 3, "nm") with pytest.raises(NotImplementedError): wr2 in wr # noqa diff --git a/satpy/tests/test_demo.py b/satpy/tests/test_demo.py index 976b6bbd6e..32e8016f58 100644 --- a/satpy/tests/test_demo.py +++ b/satpy/tests/test_demo.py @@ -58,7 +58,7 @@ def __call__(self, pattern): except IndexError: num_results = self.num_results[-1] self.current_call += 1 - return [pattern + '.{:03d}'.format(idx) for idx in range(num_results)] + return [pattern + ".{:03d}".format(idx) for idx in range(num_results)] class TestDemo(unittest.TestCase): @@ -80,26 +80,26 @@ def tearDown(self): except OSError: pass - @mock.patch('satpy.demo._google_cloud_platform.gcsfs') + @mock.patch("satpy.demo._google_cloud_platform.gcsfs") def test_get_us_midlatitude_cyclone_abi(self, gcsfs_mod): """Test data download 
function.""" from satpy.demo import get_us_midlatitude_cyclone_abi gcsfs_mod.GCSFileSystem = mock.MagicMock() gcsfs_inst = mock.MagicMock() gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst - gcsfs_inst.glob.return_value = ['a.nc', 'b.nc'] + gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] # expected 16 files, got 2 - self.assertRaises(AssertionError, get_us_midlatitude_cyclone_abi) + self.assertRaises(RuntimeError, get_us_midlatitude_cyclone_abi) # unknown access method - self.assertRaises(NotImplementedError, get_us_midlatitude_cyclone_abi, method='unknown') + self.assertRaises(NotImplementedError, get_us_midlatitude_cyclone_abi, method="unknown") - gcsfs_inst.glob.return_value = ['a.nc'] * 16 + gcsfs_inst.glob.return_value = ["a.nc"] * 16 filenames = get_us_midlatitude_cyclone_abi() - expected = os.path.join('.', 'abi_l1b', '20190314_us_midlatitude_cyclone', 'a.nc') + expected = os.path.join(".", "abi_l1b", "20190314_us_midlatitude_cyclone", "a.nc") for fn in filenames: - self.assertEqual(expected, fn) + assert expected == fn - @mock.patch('satpy.demo._google_cloud_platform.gcsfs') + @mock.patch("satpy.demo._google_cloud_platform.gcsfs") def test_get_hurricane_florence_abi(self, gcsfs_mod): """Test data download function.""" from satpy.demo import get_hurricane_florence_abi @@ -109,86 +109,86 @@ def test_get_hurricane_florence_abi(self, gcsfs_mod): # only return 5 results total gcsfs_inst.glob.side_effect = _GlobHelper([5, 0]) # expected 16 files * 10 frames, got 16 * 5 - self.assertRaises(AssertionError, get_hurricane_florence_abi) - self.assertRaises(NotImplementedError, get_hurricane_florence_abi, method='unknown') + self.assertRaises(RuntimeError, get_hurricane_florence_abi) + self.assertRaises(NotImplementedError, get_hurricane_florence_abi, method="unknown") gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi() - self.assertEqual(10 * 16, len(filenames)) + assert 10 * 16 == len(filenames) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(channels=[2, 3, 4]) - self.assertEqual(10 * 3, len(filenames)) + assert 10 * 3 == len(filenames) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(channels=[2, 3, 4], num_frames=5) - self.assertEqual(5 * 3, len(filenames)) + assert 5 * 3 == len(filenames) gcsfs_inst.glob.side_effect = _GlobHelper([int(240 / 16), 0, 0, 0] * 16) filenames = get_hurricane_florence_abi(num_frames=5) - self.assertEqual(5 * 16, len(filenames)) + assert 5 * 16 == len(filenames) class TestGCPUtils(unittest.TestCase): """Test Google Cloud Platform utilities.""" - @mock.patch('satpy.demo._google_cloud_platform.urlopen') + @mock.patch("satpy.demo._google_cloud_platform.urlopen") def test_is_gcp_instance(self, uo): """Test is_google_cloud_instance.""" from satpy.demo._google_cloud_platform import URLError, is_google_cloud_instance uo.side_effect = URLError("Test Environment") - self.assertFalse(is_google_cloud_instance()) + assert not is_google_cloud_instance() - @mock.patch('satpy.demo._google_cloud_platform.gcsfs') + @mock.patch("satpy.demo._google_cloud_platform.gcsfs") def test_get_bucket_files(self, gcsfs_mod): """Test get_bucket_files basic cases.""" from satpy.demo._google_cloud_platform import get_bucket_files gcsfs_mod.GCSFileSystem = mock.MagicMock() gcsfs_inst = mock.MagicMock() gcsfs_mod.GCSFileSystem.return_value = gcsfs_inst - gcsfs_inst.glob.return_value = ['a.nc', 'b.nc'] - 
filenames = get_bucket_files('*.nc', '.') - expected = [os.path.join('.', 'a.nc'), os.path.join('.', 'b.nc')] - self.assertEqual(expected, filenames) + gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] + filenames = get_bucket_files("*.nc", ".") + expected = [os.path.join(".", "a.nc"), os.path.join(".", "b.nc")] + assert expected == filenames gcsfs_inst.glob.side_effect = _GlobHelper(10) - filenames = get_bucket_files(['*.nc', '*.txt'], '.', pattern_slice=slice(2, 5)) - self.assertEqual(len(filenames), 3 * 2) + filenames = get_bucket_files(["*.nc", "*.txt"], ".", pattern_slice=slice(2, 5)) + assert len(filenames) == 3 * 2 gcsfs_inst.glob.side_effect = None # reset mock side effect - gcsfs_inst.glob.return_value = ['a.nc', 'b.nc'] - self.assertRaises(OSError, get_bucket_files, '*.nc', 'does_not_exist') + gcsfs_inst.glob.return_value = ["a.nc", "b.nc"] + self.assertRaises(OSError, get_bucket_files, "*.nc", "does_not_exist") - open('a.nc', 'w').close() # touch the file + open("a.nc", "w").close() # touch the file gcsfs_inst.get.reset_mock() - gcsfs_inst.glob.return_value = ['a.nc'] - filenames = get_bucket_files('*.nc', '.') - self.assertEqual([os.path.join('.', 'a.nc')], filenames) + gcsfs_inst.glob.return_value = ["a.nc"] + filenames = get_bucket_files("*.nc", ".") + assert [os.path.join(".", "a.nc")] == filenames gcsfs_inst.get.assert_not_called() # force redownload gcsfs_inst.get.reset_mock() - gcsfs_inst.glob.return_value = ['a.nc'] - filenames = get_bucket_files('*.nc', '.', force=True) - self.assertEqual([os.path.join('.', 'a.nc')], filenames) + gcsfs_inst.glob.return_value = ["a.nc"] + filenames = get_bucket_files("*.nc", ".", force=True) + assert [os.path.join(".", "a.nc")] == filenames gcsfs_inst.get.assert_called_once() # if we don't get any results then we expect an exception gcsfs_inst.get.reset_mock() gcsfs_inst.glob.return_value = [] - self.assertRaises(OSError, get_bucket_files, '*.nc', '.') + self.assertRaises(OSError, get_bucket_files, "*.nc", ".") - @mock.patch('satpy.demo._google_cloud_platform.gcsfs', None) + @mock.patch("satpy.demo._google_cloud_platform.gcsfs", None) def test_no_gcsfs(self): """Test that 'gcsfs' is required.""" from satpy.demo._google_cloud_platform import get_bucket_files - self.assertRaises(RuntimeError, get_bucket_files, '*.nc', '.') + self.assertRaises(RuntimeError, get_bucket_files, "*.nc", ".") class TestAHIDemoDownload: """Test the AHI demo data download.""" - @mock.patch.dict(sys.modules, {'s3fs': mock.MagicMock()}) + @mock.patch.dict(sys.modules, {"s3fs": mock.MagicMock()}) def test_ahi_full_download(self): """Test that the himawari download works as expected.""" from tempfile import gettempdir @@ -197,7 +197,7 @@ def test_ahi_full_download(self): files = download_typhoon_surigae_ahi(base_dir=gettempdir()) assert len(files) == 160 - @mock.patch.dict(sys.modules, {'s3fs': mock.MagicMock()}) + @mock.patch.dict(sys.modules, {"s3fs": mock.MagicMock()}) def test_ahi_partial_download(self): """Test that the himawari download works as expected.""" from tempfile import gettempdir @@ -244,11 +244,12 @@ class _FakeRequest: requests_log: list[str] = [] - def __init__(self, url, stream=None): + def __init__(self, url, stream=None, timeout=None): self._filename = os.path.basename(url) self.headers = {} self.requests_log.append(url) del stream # just mimicking requests 'get' + del timeout # just mimicking requests 'get' def __enter__(self): return self @@ -274,7 +275,7 @@ def iter_content(self, chunk_size): x = bytes_io.read(chunk_size) 
-@mock.patch('satpy.demo.utils.requests') +@mock.patch("satpy.demo.utils.requests") class TestVIIRSSDRDemoDownload: """Test VIIRS SDR downloading.""" @@ -284,20 +285,20 @@ class TestVIIRSSDRDemoDownload: "SVDNB") ALL_GEO_PREFIXES = ("GITCO", "GMTCO", "GDNBO") - def test_download(self, _requests, tmpdir): + def test_download(self, requests, tmpdir): """Test downloading VIIRS SDR data.""" from satpy.demo import get_viirs_sdr_20170128_1229 - _requests.get.side_effect = _FakeRequest + requests.get.side_effect = _FakeRequest with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir)) assert len(files) == 10 * (16 + 5 + 1 + 3) # 10 granules * (5 I bands + 16 M bands + 1 DNB + 3 geolocation) self._assert_bands_in_filenames_and_contents(self.ALL_BAND_PREFIXES + self.ALL_GEO_PREFIXES, files, 10) - def test_do_not_download_the_files_twice(self, _requests, tmpdir): + def test_do_not_download_the_files_twice(self, requests, tmpdir): """Test re-downloading VIIRS SDR data.""" from satpy.demo import get_viirs_sdr_20170128_1229 get_mock = mock.MagicMock() - _requests.get.return_value.__enter__ = get_mock + requests.get.return_value.__enter__ = get_mock with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir)) new_files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir)) @@ -307,21 +308,21 @@ def test_do_not_download_the_files_twice(self, _requests, tmpdir): assert get_mock.call_count == total_num_files assert new_files == files - def test_download_channels_num_granules_im(self, _requests, tmpdir): + def test_download_channels_num_granules_im(self, requests, tmpdir): """Test downloading VIIRS SDR I/M data with select granules.""" from satpy.demo import get_viirs_sdr_20170128_1229 - _requests.get.side_effect = _FakeRequest + requests.get.side_effect = _FakeRequest with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("I01", "M01")) assert len(files) == 10 * (1 + 1 + 2) # 10 granules * (1 I band + 1 M band + 2 geolocation) self._assert_bands_in_filenames_and_contents(("SVI01", "SVM01", "GITCO", "GMTCO"), files, 10) - def test_download_channels_num_granules_im_twice(self, _requests, tmpdir): + def test_download_channels_num_granules_im_twice(self, requests, tmpdir): """Test re-downloading VIIRS SDR I/M data with select granules.""" from satpy.demo import get_viirs_sdr_20170128_1229 get_mock = mock.MagicMock() - _requests.get.return_value.__enter__ = get_mock + requests.get.return_value.__enter__ = get_mock with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("I01", "M01")) @@ -334,10 +335,10 @@ def test_download_channels_num_granules_im_twice(self, _requests, tmpdir): assert len(files) == 2 * (1 + 1 + 2) # 2 granules * (1 I band + 1 M band + 2 geolocation) assert get_mock.call_count == num_first_batch - def test_download_channels_num_granules_dnb(self, _requests, tmpdir): + def test_download_channels_num_granules_dnb(self, requests, tmpdir): """Test downloading and re-downloading VIIRS SDR DNB data with select granules.""" from satpy.demo import get_viirs_sdr_20170128_1229 - _requests.get.side_effect = _FakeRequest + requests.get.side_effect = _FakeRequest with mock_filesystem(): files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir), channels=("DNB",), @@ -415,7 +416,7 @@ def setUp(self): self.subdir = os.path.join(".", "seviri_hrit", "20180228_1500") self.files = generate_subset_of_filenames(base_dir=self.subdir) - self.patcher = mock.patch('satpy.demo.utils.requests.get', 
autospec=True) + self.patcher = mock.patch("satpy.demo.utils.requests.get", autospec=True) self.get_mock = self.patcher.start() _FakeRequest.requests_log = [] @@ -450,12 +451,12 @@ def test_download_a_subset_of_files(self): with mock_filesystem(): files = download_seviri_hrit_20180228_1500(subset={"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None}) assert set(files) == set(os.path.join(self.subdir, filename) for filename in [ - 'H-000-MSG4__-MSG4________-_________-EPI______-201802281500-__', - 'H-000-MSG4__-MSG4________-HRV______-000001___-201802281500-__', - 'H-000-MSG4__-MSG4________-HRV______-000002___-201802281500-__', - 'H-000-MSG4__-MSG4________-HRV______-000003___-201802281500-__', - 'H-000-MSG4__-MSG4________-IR_108___-000001___-201802281500-__', - 'H-000-MSG4__-MSG4________-IR_108___-000002___-201802281500-__', + "H-000-MSG4__-MSG4________-_________-EPI______-201802281500-__", + "H-000-MSG4__-MSG4________-HRV______-000001___-201802281500-__", + "H-000-MSG4__-MSG4________-HRV______-000002___-201802281500-__", + "H-000-MSG4__-MSG4________-HRV______-000003___-201802281500-__", + "H-000-MSG4__-MSG4________-IR_108___-000001___-201802281500-__", + "H-000-MSG4__-MSG4________-IR_108___-000002___-201802281500-__", ]) def test_do_not_download_same_file_twice(self): diff --git a/satpy/tests/test_dependency_tree.py b/satpy/tests/test_dependency_tree.py index 415a927cc5..40433c0032 100644 --- a/satpy/tests/test_dependency_tree.py +++ b/satpy/tests/test_dependency_tree.py @@ -59,7 +59,7 @@ def setUp(self): self.dependency_tree.add_leaf(dependency_2_1, node_dependency_2) # We don't need to add the unmodified dependency a second time. - dependency_3 = make_dataid(name='ds2', resolution=250, calibration="reflectance", modifiers=tuple()) + dependency_3 = make_dataid(name="ds2", resolution=250, calibration="reflectance", modifiers=tuple()) self.dependency_tree.add_leaf(dependency_3, node_composite_1) @staticmethod @@ -78,7 +78,7 @@ def test_copy_preserves_all_nodes(self): new_dependency_tree.trunk()) # make sure that we can get access to sub-nodes - c13_id = make_cid(name='comp13') + c13_id = make_cid(name="comp13") assert self._nodes_equal(self.dependency_tree.trunk(limit_nodes_to=[c13_id]), new_dependency_tree.trunk(limit_nodes_to=[c13_id])) @@ -87,10 +87,8 @@ def test_copy_preserves_unique_empty_node(self): new_dependency_tree = self.dependency_tree.copy() assert self.dependency_tree.empty_node is new_dependency_tree.empty_node - self.assertIs(self.dependency_tree._root.children[0].children[0].children[1], - self.dependency_tree.empty_node) - self.assertIs(new_dependency_tree._root.children[0].children[0].children[1], - self.dependency_tree.empty_node) + assert self.dependency_tree._root.children[0].children[0].children[1] is self.dependency_tree.empty_node + assert new_dependency_tree._root.children[0].children[0].children[1] is self.dependency_tree.empty_node def test_new_dependency_tree_preserves_unique_empty_node(self): """Test that dependency tree instantiation preserves the uniqueness of the empty node.""" @@ -104,14 +102,14 @@ class TestMissingDependencies(unittest.TestCase): def test_new_missing_dependencies(self): """Test new MissingDependencies.""" from satpy.node import MissingDependencies - error = MissingDependencies('bla') - assert error.missing_dependencies == 'bla' + error = MissingDependencies("bla") + assert error.missing_dependencies == "bla" def test_new_missing_dependencies_with_message(self): """Test new MissingDependencies with a message.""" from satpy.node import 
MissingDependencies - error = MissingDependencies('bla', "This is a message") - assert 'This is a message' in str(error) + error = MissingDependencies("bla", "This is a message") + assert "This is a message" in str(error) class TestMultipleResolutionSameChannelDependency(unittest.TestCase): @@ -126,27 +124,27 @@ def test_modis_overview_1000m(self): from satpy.modifiers.geometry import SunZenithCorrector from satpy.readers.yaml_reader import FileYAMLReader - config_file = os.path.join(PACKAGE_CONFIG_PATH, 'readers', 'modis_l1b.yaml') + config_file = os.path.join(PACKAGE_CONFIG_PATH, "readers", "modis_l1b.yaml") self.reader_instance = FileYAMLReader.from_config_files(config_file) - overview = {'_satpy_id': make_dataid(name='overview'), - 'name': 'overview', - 'optional_prerequisites': [], - 'prerequisites': [DataQuery(name='1', modifiers=('sunz_corrected',)), - DataQuery(name='2', modifiers=('sunz_corrected',)), - DataQuery(name='31')], - 'standard_name': 'overview'} - compositors = {'modis': DatasetDict()} - compositors['modis']['overview'] = GenericCompositor(**overview) - - modifiers = {'modis': {'sunz_corrected': (SunZenithCorrector, - {'optional_prerequisites': ['solar_zenith_angle'], - 'name': 'sunz_corrected', - 'prerequisites': []})}} - dep_tree = DependencyTree({'modis_l1b': self.reader_instance}, compositors, modifiers) - dep_tree.populate_with_keys({'overview'}, DataQuery(resolution=1000)) + overview = {"_satpy_id": make_dataid(name="overview"), + "name": "overview", + "optional_prerequisites": [], + "prerequisites": [DataQuery(name="1", modifiers=("sunz_corrected",)), + DataQuery(name="2", modifiers=("sunz_corrected",)), + DataQuery(name="31")], + "standard_name": "overview"} + compositors = {"modis": DatasetDict()} + compositors["modis"]["overview"] = GenericCompositor(**overview) + + modifiers = {"modis": {"sunz_corrected": (SunZenithCorrector, + {"optional_prerequisites": ["solar_zenith_angle"], + "name": "sunz_corrected", + "prerequisites": []})}} + dep_tree = DependencyTree({"modis_l1b": self.reader_instance}, compositors, modifiers) + dep_tree.populate_with_keys({"overview"}, DataQuery(resolution=1000)) for key in dep_tree._all_nodes.keys(): - assert key.get('resolution', 1000) == 1000 + assert key.get("resolution", 1000) == 1000 class TestMultipleSensors(unittest.TestCase): @@ -194,18 +192,18 @@ def __call__(self, *args, **kwargs): # create the dictionary one element at a time to force "incorrect" order # (sensor2 comes before sensor1, but results should be alphabetical order) compositors = {} - compositors['sensor2'] = s2_comps = DatasetDict() - compositors['sensor1'] = s1_comps = DatasetDict() - c1_s2_id = make_cid(name='comp1', resolution=1000) - c1_s1_id = make_cid(name='comp1', resolution=500) + compositors["sensor2"] = s2_comps = DatasetDict() + compositors["sensor1"] = s1_comps = DatasetDict() + c1_s2_id = make_cid(name="comp1", resolution=1000) + c1_s1_id = make_cid(name="comp1", resolution=500) s2_comps[c1_s2_id] = comp1_sensor2 s1_comps[c1_s1_id] = comp1_sensor1 modifiers = {} - modifiers['sensor2'] = s2_mods = {} - modifiers['sensor1'] = s1_mods = {} - s2_mods['mod1'] = (_FakeModifier, {'ret_val': 2}) - s1_mods['mod1'] = (_FakeModifier, {'ret_val': 1}) + modifiers["sensor2"] = s2_mods = {} + modifiers["sensor1"] = s1_mods = {} + s2_mods["mod1"] = (_FakeModifier, {"ret_val": 2}) + s1_mods["mod1"] = (_FakeModifier, {"ret_val": 1}) self.dependency_tree = DependencyTree({}, compositors, modifiers) # manually add a leaf so we don't have to mock a reader @@ -214,16 
+212,16 @@ def __call__(self, *args, **kwargs): def test_compositor_loaded_sensor_order(self): """Test that a compositor is loaded from the first alphabetical sensor.""" - self.dependency_tree.populate_with_keys({'comp1'}) + self.dependency_tree.populate_with_keys({"comp1"}) comp_nodes = self.dependency_tree.trunk() - self.assertEqual(len(comp_nodes), 1) - self.assertEqual(comp_nodes[0].name["resolution"], 500) + assert len(comp_nodes) == 1 + assert comp_nodes[0].name["resolution"] == 500 def test_modifier_loaded_sensor_order(self): """Test that a modifier is loaded from the first alphabetical sensor.""" from satpy import DataQuery - dq = DataQuery(name='ds5', modifiers=('mod1',)) + dq = DataQuery(name="ds5", modifiers=("mod1",)) self.dependency_tree.populate_with_keys({dq}) comp_nodes = self.dependency_tree.trunk() - self.assertEqual(len(comp_nodes), 1) - self.assertEqual(comp_nodes[0].data[0].ret_val, 1) + assert len(comp_nodes) == 1 + assert comp_nodes[0].data[0].ret_val == 1 diff --git a/satpy/tests/test_file_handlers.py b/satpy/tests/test_file_handlers.py index 46f4a16784..403e686204 100644 --- a/satpy/tests/test_file_handlers.py +++ b/satpy/tests/test_file_handlers.py @@ -32,7 +32,7 @@ def test_open_dataset(): """Test xr.open_dataset wrapper.""" fn = mock.MagicMock() str_file_path = "path/to/file.nc" - with mock.patch('xarray.open_dataset') as xr_open: + with mock.patch("xarray.open_dataset") as xr_open: _ = open_dataset(fn, decode_cf=True, chunks=500) fn.open.assert_called_once_with() xr_open.assert_called_once_with(fn.open(), decode_cf=True, chunks=500) @@ -48,110 +48,110 @@ class TestBaseFileHandler(unittest.TestCase): def setUp(self): """Set up the test.""" self.fh = BaseFileHandler( - 'filename', {'filename_info': 'bla'}, 'filetype_info') + "filename", {"filename_info": "bla"}, "filetype_info") def test_combine_times(self): """Combine times.""" - info1 = {'start_time': 1} - info2 = {'start_time': 2} + info1 = {"start_time": 1} + info2 = {"start_time": 2} res = self.fh.combine_info([info1, info2]) - exp = {'start_time': 1} - self.assertDictEqual(res, exp) + exp = {"start_time": 1} + assert res == exp res = self.fh.combine_info([info2, info1]) - exp = {'start_time': 1} - self.assertDictEqual(res, exp) + exp = {"start_time": 1} + assert res == exp - info1 = {'end_time': 1} - info2 = {'end_time': 2} + info1 = {"end_time": 1} + info2 = {"end_time": 2} res = self.fh.combine_info([info1, info2]) - exp = {'end_time': 2} - self.assertDictEqual(res, exp) + exp = {"end_time": 2} + assert res == exp res = self.fh.combine_info([info2, info1]) - exp = {'end_time': 2} - self.assertDictEqual(res, exp) + exp = {"end_time": 2} + assert res == exp def test_combine_orbits(self): """Combine orbits.""" - info1 = {'start_orbit': 1} - info2 = {'start_orbit': 2} + info1 = {"start_orbit": 1} + info2 = {"start_orbit": 2} res = self.fh.combine_info([info1, info2]) - exp = {'start_orbit': 1} - self.assertDictEqual(res, exp) + exp = {"start_orbit": 1} + assert res == exp res = self.fh.combine_info([info2, info1]) - exp = {'start_orbit': 1} - self.assertDictEqual(res, exp) + exp = {"start_orbit": 1} + assert res == exp - info1 = {'end_orbit': 1} - info2 = {'end_orbit': 2} + info1 = {"end_orbit": 1} + info2 = {"end_orbit": 2} res = self.fh.combine_info([info1, info2]) - exp = {'end_orbit': 2} - self.assertDictEqual(res, exp) + exp = {"end_orbit": 2} + assert res == exp res = self.fh.combine_info([info2, info1]) - exp = {'end_orbit': 2} - self.assertDictEqual(res, exp) + exp = {"end_orbit": 2} + assert res == 
exp - @mock.patch('satpy.readers.file_handlers.SwathDefinition') + @mock.patch("satpy.readers.file_handlers.SwathDefinition") def test_combine_area(self, sdef): """Combine area.""" area1 = mock.MagicMock() area1.lons = np.arange(5) area1.lats = np.arange(5) - area1.name = 'area1' + area1.name = "area1" area2 = mock.MagicMock() area2.lons = np.arange(5) area2.lats = np.arange(5) - area2.name = 'area2' + area2.name = "area2" - info1 = {'area': area1} - info2 = {'area': area2} + info1 = {"area": area1} + info2 = {"area": area2} self.fh.combine_info([info1, info2]) - self.assertTupleEqual(sdef.call_args[1]['lons'].shape, (2, 5)) - self.assertTupleEqual(sdef.call_args[1]['lats'].shape, (2, 5)) - self.assertEqual(sdef.return_value.name, 'area1_area2') + assert sdef.call_args[1]["lons"].shape == (2, 5) + assert sdef.call_args[1]["lats"].shape == (2, 5) + assert sdef.return_value.name == "area1_area2" def test_combine_orbital_parameters(self): """Combine orbital parameters.""" - info1 = {'orbital_parameters': {'projection_longitude': 1, - 'projection_latitude': 1, - 'projection_altitude': 1, - 'satellite_nominal_longitude': 1, - 'satellite_nominal_latitude': 1, - 'satellite_actual_longitude': 1, - 'satellite_actual_latitude': 1, - 'satellite_actual_altitude': 1, - 'nadir_longitude': 1, - 'nadir_latitude': 1, - 'only_in_1': False}} - info2 = {'orbital_parameters': {'projection_longitude': 2, - 'projection_latitude': 2, - 'projection_altitude': 2, - 'satellite_nominal_longitude': 2, - 'satellite_nominal_latitude': 2, - 'satellite_actual_longitude': 2, - 'satellite_actual_latitude': 2, - 'satellite_actual_altitude': 2, - 'nadir_longitude': 2, - 'nadir_latitude': 2, - 'only_in_2': True}} - exp = {'orbital_parameters': {'projection_longitude': 1.5, - 'projection_latitude': 1.5, - 'projection_altitude': 1.5, - 'satellite_nominal_longitude': 1.5, - 'satellite_nominal_latitude': 1.5, - 'satellite_actual_longitude': 1.5, - 'satellite_actual_latitude': 1.5, - 'satellite_actual_altitude': 1.5, - 'nadir_longitude': 1.5, - 'nadir_latitude': 1.5, - 'only_in_1': False, - 'only_in_2': True}} + info1 = {"orbital_parameters": {"projection_longitude": 1, + "projection_latitude": 1, + "projection_altitude": 1, + "satellite_nominal_longitude": 1, + "satellite_nominal_latitude": 1, + "satellite_actual_longitude": 1, + "satellite_actual_latitude": 1, + "satellite_actual_altitude": 1, + "nadir_longitude": 1, + "nadir_latitude": 1, + "only_in_1": False}} + info2 = {"orbital_parameters": {"projection_longitude": 2, + "projection_latitude": 2, + "projection_altitude": 2, + "satellite_nominal_longitude": 2, + "satellite_nominal_latitude": 2, + "satellite_actual_longitude": 2, + "satellite_actual_latitude": 2, + "satellite_actual_altitude": 2, + "nadir_longitude": 2, + "nadir_latitude": 2, + "only_in_2": True}} + exp = {"orbital_parameters": {"projection_longitude": 1.5, + "projection_latitude": 1.5, + "projection_altitude": 1.5, + "satellite_nominal_longitude": 1.5, + "satellite_nominal_latitude": 1.5, + "satellite_actual_longitude": 1.5, + "satellite_actual_latitude": 1.5, + "satellite_actual_altitude": 1.5, + "nadir_longitude": 1.5, + "nadir_latitude": 1.5, + "only_in_1": False, + "only_in_2": True}} res = self.fh.combine_info([info1, info2]) - self.assertDictEqual(res, exp) + assert res == exp # Identity - self.assertEqual(self.fh.combine_info([info1]), info1) + assert self.fh.combine_info([info1]) == info1 # Empty self.fh.combine_info([{}]) @@ -159,34 +159,34 @@ def test_combine_orbital_parameters(self): def 
test_combine_time_parameters(self): """Combine times in 'time_parameters.""" time_params1 = { - 'nominal_start_time': datetime(2020, 1, 1, 12, 0, 0), - 'nominal_end_time': datetime(2020, 1, 1, 12, 2, 30), - 'observation_start_time': datetime(2020, 1, 1, 12, 0, 2, 23821), - 'observation_end_time': datetime(2020, 1, 1, 12, 2, 23, 12348), + "nominal_start_time": datetime(2020, 1, 1, 12, 0, 0), + "nominal_end_time": datetime(2020, 1, 1, 12, 2, 30), + "observation_start_time": datetime(2020, 1, 1, 12, 0, 2, 23821), + "observation_end_time": datetime(2020, 1, 1, 12, 2, 23, 12348), } time_params2 = {} time_shift = timedelta(seconds=1.5) for key, value in time_params1.items(): time_params2[key] = value + time_shift res = self.fh.combine_info([ - {'time_parameters': time_params1}, - {'time_parameters': time_params2} + {"time_parameters": time_params1}, + {"time_parameters": time_params2} ]) - res_time_params = res['time_parameters'] - assert res_time_params['nominal_start_time'] == datetime(2020, 1, 1, 12, 0, 0) - assert res_time_params['nominal_end_time'] == datetime(2020, 1, 1, 12, 2, 31, 500000) - assert res_time_params['observation_start_time'] == datetime(2020, 1, 1, 12, 0, 2, 23821) - assert res_time_params['observation_end_time'] == datetime(2020, 1, 1, 12, 2, 24, 512348) + res_time_params = res["time_parameters"] + assert res_time_params["nominal_start_time"] == datetime(2020, 1, 1, 12, 0, 0) + assert res_time_params["nominal_end_time"] == datetime(2020, 1, 1, 12, 2, 31, 500000) + assert res_time_params["observation_start_time"] == datetime(2020, 1, 1, 12, 0, 2, 23821) + assert res_time_params["observation_end_time"] == datetime(2020, 1, 1, 12, 2, 24, 512348) def test_file_is_kept_intact(self): """Test that the file object passed (string, path, or other) is kept intact.""" open_file = mock.MagicMock() - bfh = BaseFileHandler(open_file, {'filename_info': 'bla'}, 'filetype_info') + bfh = BaseFileHandler(open_file, {"filename_info": "bla"}, "filetype_info") assert bfh.filename == open_file from pathlib import Path - filename = Path('/bla/bla.nc') - bfh = BaseFileHandler(filename, {'filename_info': 'bla'}, 'filetype_info') + filename = Path("/bla/bla.nc") + bfh = BaseFileHandler(filename, {"filename_info": "bla"}, "filetype_info") assert isinstance(bfh.filename, Path) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index d79db17d74..4aece73487 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -30,43 +30,43 @@ def _sunz_area_def(): """Get fake area for testing sunz generation.""" - area = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 2, + area = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 2, 2, (-2000, -2000, 2000, 2000)) return area def _sunz_bigger_area_def(): """Get area that is twice the size of 'sunz_area_def'.""" - bigger_area = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 4, 4, + bigger_area = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 4, 4, (-2000, -2000, 2000, 2000)) return bigger_area def _sunz_stacked_area_def(): """Get fake stacked area for testing sunz generation.""" - area1 = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 1, + area1 = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 2, 1, (-2000, 0, 2000, 2000)) - area2 = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 1, + area2 = AreaDefinition("test", "test", "test", + {"proj": "merc"}, 2, 1, (-2000, -2000, 2000, 0)) return StackedAreaDefinition(area1, area2) def 
_shared_sunz_attrs(area_def): - attrs = {'area': area_def, - 'start_time': datetime(2018, 1, 1, 18), - 'modifiers': tuple(), - 'name': 'test_vis'} + attrs = {"area": area_def, + "start_time": datetime(2018, 1, 1, 18), + "modifiers": tuple(), + "name": "test_vis"} return attrs def _get_ds1(attrs): ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}) return ds1 @@ -89,8 +89,8 @@ def sunz_ds2(): """Generate larger fake dataset for sunz tests.""" attrs = _shared_sunz_attrs(_sunz_bigger_area_def()) ds2 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 0.5, 1, 1.5], 'x': [0, 0.5, 1, 1.5]}) + attrs=attrs, dims=("y", "x"), + coords={"y": [0, 0.5, 1, 1.5], "x": [0, 0.5, 1, 1.5]}) return ds2 @@ -100,9 +100,9 @@ def sunz_sza(): sza = xr.DataArray( np.rad2deg(np.arccos(da.from_array([[0.0149581333, 0.0146694376], [0.0150812684, 0.0147925727]], chunks=2))), - attrs={'area': _sunz_area_def()}, - dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}, + attrs={"area": _sunz_area_def()}, + dims=("y", "x"), + coords={"y": [0, 1], "x": [0, 1]}, ) return sza @@ -117,49 +117,49 @@ def test_basic_default_not_provided(self, sunz_ds1, as_32bit): if as_32bit: sunz_ds1 = sunz_ds1.astype(np.float32) - comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) - res = comp((sunz_ds1,), test_attr='test') + comp = SunZenithCorrector(name="sza_test", modifiers=tuple()) + res = comp((sunz_ds1,), test_attr="test") np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) - assert 'y' in res.coords - assert 'x' in res.coords - ds1 = sunz_ds1.copy().drop_vars(('y', 'x')) - res = comp((ds1,), test_attr='test') + assert "y" in res.coords + assert "x" in res.coords + ds1 = sunz_ds1.copy().drop_vars(("y", "x")) + res = comp((ds1,), test_attr="test") res_np = res.compute() np.testing.assert_allclose(res_np.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) assert res.dtype == res_np.dtype - assert 'y' not in res.coords - assert 'x' not in res.coords + assert "y" not in res.coords + assert "x" not in res.coords def test_basic_lims_not_provided(self, sunz_ds1): """Test custom limits when SZA isn't provided.""" from satpy.modifiers.geometry import SunZenithCorrector - comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) - res = comp((sunz_ds1,), test_attr='test') + comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) + res = comp((sunz_ds1,), test_attr="test") np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) def test_basic_default_provided(self, data_arr, sunz_sza): """Test default limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector - comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) - res = comp((data_arr, sunz_sza), test_attr='test') + comp = SunZenithCorrector(name="sza_test", modifiers=tuple()) + res = comp((data_arr, sunz_sza), test_attr="test") np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) def test_basic_lims_provided(self, data_arr, sunz_sza): """Test custom 
limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector - comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) - res = comp((data_arr, sunz_sza), test_attr='test') + comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) + res = comp((data_arr, sunz_sza), test_attr="test") np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) def test_imcompatible_areas(self, sunz_ds2, sunz_sza): """Test sunz correction on incompatible areas.""" from satpy.composites import IncompatibleAreas from satpy.modifiers.geometry import SunZenithCorrector - comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) + comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) with pytest.raises(IncompatibleAreas): - comp((sunz_ds2, sunz_sza), test_attr='test') + comp((sunz_ds2, sunz_sza), test_attr="test") class TestSunZenithReducer: @@ -169,20 +169,20 @@ class TestSunZenithReducer: def setup_class(cls): """Initialze SunZenithReducer classes that shall be tested.""" from satpy.modifiers.geometry import SunZenithReducer - cls.default = SunZenithReducer(name='sza_reduction_test_default', modifiers=tuple()) - cls.custom = SunZenithReducer(name='sza_reduction_test_custom', modifiers=tuple(), + cls.default = SunZenithReducer(name="sza_reduction_test_default", modifiers=tuple()) + cls.custom = SunZenithReducer(name="sza_reduction_test_custom", modifiers=tuple(), correction_limit=70, max_sza=95, strength=3.0) def test_default_settings(self, sunz_ds1, sunz_sza): """Test default settings with sza data available.""" - res = self.default((sunz_ds1, sunz_sza), test_attr='test') + res = self.default((sunz_ds1, sunz_sza), test_attr="test") np.testing.assert_allclose(res.values, np.array([[0.00242814, 0.00235669], [0.00245885, 0.00238707]]), rtol=1e-5) def test_custom_settings(self, sunz_ds1, sunz_sza): """Test custom settings with sza data available.""" - res = self.custom((sunz_ds1, sunz_sza), test_attr='test') + res = self.custom((sunz_ds1, sunz_sza), test_attr="test") np.testing.assert_allclose(res.values, np.array([[0.01041319, 0.01030033], [0.01046164, 0.01034834]]), rtol=1e-5) @@ -190,8 +190,8 @@ def test_custom_settings(self, sunz_ds1, sunz_sza): def test_invalid_max_sza(self, sunz_ds1, sunz_sza): """Test invalid max_sza with sza data available.""" from satpy.modifiers.geometry import SunZenithReducer - with pytest.raises(ValueError): - SunZenithReducer(name='sza_reduction_test_invalid', modifiers=tuple(), max_sza=None) + with pytest.raises(ValueError, match="`max_sza` must be defined when using the SunZenithReducer."): + SunZenithReducer(name="sza_reduction_test_invalid", modifiers=tuple(), max_sza=None) class TestNIRReflectance(unittest.TestCase): @@ -205,24 +205,24 @@ def setUp(self): area = mock.MagicMock(get_lonlats=self.get_lonlats) self.start_time = 1 - self.metadata = {'platform_name': 'Meteosat-11', - 'sensor': 'seviri', - 'name': 'IR_039', - 'area': area, - 'start_time': self.start_time} + self.metadata = {"platform_name": "Meteosat-11", + "sensor": "seviri", + "name": "IR_039", + "area": area, + "start_time": self.start_time} nir_arr = np.random.random((2, 2)) - self.nir = xr.DataArray(da.from_array(nir_arr), dims=['y', 'x']) + self.nir = xr.DataArray(da.from_array(nir_arr), dims=["y", "x"]) self.nir.attrs.update(self.metadata) ir_arr = 100 * np.random.random((2, 2)) - self.ir_ = xr.DataArray(da.from_array(ir_arr), dims=['y', 'x']) - 
self.ir_.attrs['area'] = area + self.ir_ = xr.DataArray(da.from_array(ir_arr), dims=["y", "x"]) + self.ir_.attrs["area"] = area self.sunz_arr = 100 * np.random.random((2, 2)) - self.sunz = xr.DataArray(da.from_array(self.sunz_arr), dims=['y', 'x']) - self.sunz.attrs['standard_name'] = 'solar_zenith_angle' - self.sunz.attrs['area'] = area + self.sunz = xr.DataArray(da.from_array(self.sunz_arr), dims=["y", "x"]) + self.sunz.attrs["standard_name"] = "solar_zenith_angle" + self.sunz.attrs["area"] = area self.da_sunz = da.from_array(self.sunz_arr) refl_arr = np.random.random((2, 2)) @@ -238,9 +238,9 @@ def fake_refl_from_tbs(self, sun_zenith, da_nir, da_tb11, tb_ir_co2=None): return self.refl_with_co2 return self.refl - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_provide_sunz_no_co2(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided only sunz.""" calculator.return_value = mock.MagicMock( @@ -248,18 +248,18 @@ def test_provide_sunz_no_co2(self, calculator, apply_modifier_info, sza): sza.return_value = self.da_sunz from satpy.modifiers.spectral import NIRReflectance - comp = NIRReflectance(name='test') - info = {'modifiers': None} + comp = NIRReflectance(name="test") + info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) assert self.metadata.items() <= res.attrs.items() - assert res.attrs['units'] == '%' - assert res.attrs['sun_zenith_threshold'] is not None + assert res.attrs["units"] == "%" + assert res.attrs["sun_zenith_threshold"] is not None assert np.allclose(res.data, self.refl * 100).compute() - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_no_sunz_no_co2(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor with minimal parameters.""" calculator.return_value = mock.MagicMock( @@ -267,8 +267,8 @@ def test_no_sunz_no_co2(self, calculator, apply_modifier_info, sza): sza.return_value = self.da_sunz from satpy.modifiers.spectral import NIRReflectance - comp = NIRReflectance(name='test') - info = {'modifiers': None} + comp = NIRReflectance(name="test") + info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[], **info) # due to copying of DataArrays, self.get_lonlats is not the same as the one that was called @@ -278,9 +278,9 @@ def test_no_sunz_no_co2(self, calculator, apply_modifier_info, sza): self.refl_from_tbs.assert_called_with(self.da_sunz, self.nir.data, self.ir_.data, tb_ir_co2=None) assert np.allclose(res.data, self.refl * 100).compute() - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def 
test_no_sunz_with_co2(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided extra co2 info.""" calculator.return_value = mock.MagicMock( @@ -288,20 +288,20 @@ def test_no_sunz_with_co2(self, calculator, apply_modifier_info, sza): from satpy.modifiers.spectral import NIRReflectance sza.return_value = self.da_sunz - comp = NIRReflectance(name='test') - info = {'modifiers': None} + comp = NIRReflectance(name="test") + info = {"modifiers": None} co2_arr = np.random.random((2, 2)) - co2 = xr.DataArray(da.from_array(co2_arr), dims=['y', 'x']) - co2.attrs['wavelength'] = [12.0, 13.0, 14.0] - co2.attrs['units'] = 'K' + co2 = xr.DataArray(da.from_array(co2_arr), dims=["y", "x"]) + co2.attrs["wavelength"] = [12.0, 13.0, 14.0] + co2.attrs["units"] = "K" res = comp([self.nir, self.ir_], optional_datasets=[co2], **info) self.refl_from_tbs.assert_called_with(self.da_sunz, self.nir.data, self.ir_.data, tb_ir_co2=co2.data) assert np.allclose(res.data, self.refl_with_co2 * 100).compute() - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_provide_sunz_and_threshold(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided sunz and a sunz threshold.""" calculator.return_value = mock.MagicMock( @@ -309,32 +309,32 @@ def test_provide_sunz_and_threshold(self, calculator, apply_modifier_info, sza): from satpy.modifiers.spectral import NIRReflectance sza.return_value = self.da_sunz - comp = NIRReflectance(name='test', sunz_threshold=84.0) - info = {'modifiers': None} + comp = NIRReflectance(name="test", sunz_threshold=84.0) + info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) - self.assertEqual(res.attrs['sun_zenith_threshold'], 84.0) - calculator.assert_called_with('Meteosat-11', 'seviri', 'IR_039', + assert res.attrs["sun_zenith_threshold"] == 84.0 + calculator.assert_called_with("Meteosat-11", "seviri", "IR_039", sunz_threshold=84.0, masking_limit=NIRReflectance.MASKING_LIMIT) - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_sunz_threshold_default_value_is_not_none(self, calculator, apply_modifier_info, sza): """Check that sun_zenith_threshold is not None.""" from satpy.modifiers.spectral import NIRReflectance - comp = NIRReflectance(name='test') - info = {'modifiers': None} + comp = NIRReflectance(name="test") + info = {"modifiers": None} calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) assert comp.sun_zenith_threshold is not None - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + 
@mock.patch("satpy.modifiers.spectral.Calculator") def test_provide_masking_limit(self, calculator, apply_modifier_info, sza): """Test NIR reflectance compositor provided sunz and a sunz threshold.""" calculator.return_value = mock.MagicMock( @@ -342,23 +342,23 @@ def test_provide_masking_limit(self, calculator, apply_modifier_info, sza): from satpy.modifiers.spectral import NIRReflectance sza.return_value = self.da_sunz - comp = NIRReflectance(name='test', masking_limit=None) - info = {'modifiers': None} + comp = NIRReflectance(name="test", masking_limit=None) + info = {"modifiers": None} res = comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) - self.assertIsNone(res.attrs['sun_zenith_masking_limit']) - calculator.assert_called_with('Meteosat-11', 'seviri', 'IR_039', + assert res.attrs["sun_zenith_masking_limit"] is None + calculator.assert_called_with("Meteosat-11", "seviri", "IR_039", sunz_threshold=NIRReflectance.TERMINATOR_LIMIT, masking_limit=None) - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_masking_limit_default_value_is_not_none(self, calculator, apply_modifier_info, sza): """Check that sun_zenith_threshold is not None.""" from satpy.modifiers.spectral import NIRReflectance - comp = NIRReflectance(name='test') - info = {'modifiers': None} + comp = NIRReflectance(name="test") + info = {"modifiers": None} calculator.return_value = mock.MagicMock( reflectance_from_tbs=self.refl_from_tbs) comp([self.nir, self.ir_], optional_datasets=[self.sunz], **info) @@ -369,9 +369,9 @@ def test_masking_limit_default_value_is_not_none(self, calculator, apply_modifie class TestNIREmissivePartFromReflectance(unittest.TestCase): """Test the NIR Emissive part from reflectance compositor.""" - @mock.patch('satpy.modifiers.spectral.sun_zenith_angle') - @mock.patch('satpy.modifiers.NIRReflectance.apply_modifier_info') - @mock.patch('satpy.modifiers.spectral.Calculator') + @mock.patch("satpy.modifiers.spectral.sun_zenith_angle") + @mock.patch("satpy.modifiers.NIRReflectance.apply_modifier_info") + @mock.patch("satpy.modifiers.spectral.Calculator") def test_compositor(self, calculator, apply_modifier_info, sza): """Test the NIR emissive part from reflectance compositor.""" from satpy.modifiers.spectral import NIRReflectance @@ -391,12 +391,12 @@ def test_compositor(self, calculator, apply_modifier_info, sza): from satpy.modifiers.spectral import NIREmissivePartFromReflectance - comp = NIREmissivePartFromReflectance(name='test', sunz_threshold=86.0) - info = {'modifiers': None} + comp = NIREmissivePartFromReflectance(name="test", sunz_threshold=86.0) + info = {"modifiers": None} - platform = 'NOAA-20' - sensor = 'viirs' - chan_name = 'M12' + platform = "NOAA-20" + sensor = "viirs" + chan_name = "M12" get_lonlats = mock.MagicMock() lons, lats = 1, 2 @@ -404,29 +404,29 @@ def test_compositor(self, calculator, apply_modifier_info, sza): area = mock.MagicMock(get_lonlats=get_lonlats) nir_arr = np.random.random((2, 2)) - nir = xr.DataArray(da.from_array(nir_arr), dims=['y', 'x']) - nir.attrs['platform_name'] = platform - nir.attrs['sensor'] = sensor - nir.attrs['name'] = chan_name - nir.attrs['area'] = area + nir = xr.DataArray(da.from_array(nir_arr), dims=["y", "x"]) + 
nir.attrs["platform_name"] = platform + nir.attrs["sensor"] = sensor + nir.attrs["name"] = chan_name + nir.attrs["area"] = area ir_arr = np.random.random((2, 2)) - ir_ = xr.DataArray(da.from_array(ir_arr), dims=['y', 'x']) - ir_.attrs['area'] = area + ir_ = xr.DataArray(da.from_array(ir_arr), dims=["y", "x"]) + ir_.attrs["area"] = area sunz_arr = 100 * np.random.random((2, 2)) - sunz = xr.DataArray(da.from_array(sunz_arr), dims=['y', 'x']) - sunz.attrs['standard_name'] = 'solar_zenith_angle' - sunz.attrs['area'] = area + sunz = xr.DataArray(da.from_array(sunz_arr), dims=["y", "x"]) + sunz.attrs["standard_name"] = "solar_zenith_angle" + sunz.attrs["area"] = area sunz2 = da.from_array(sunz_arr) sza.return_value = sunz2 res = comp([nir, ir_], optional_datasets=[sunz], **info) - self.assertEqual(res.attrs['sun_zenith_threshold'], 86.0) - self.assertEqual(res.attrs['units'], 'K') - self.assertEqual(res.attrs['platform_name'], platform) - self.assertEqual(res.attrs['sensor'], sensor) - self.assertEqual(res.attrs['name'], chan_name) - calculator.assert_called_with('NOAA-20', 'viirs', 'M12', sunz_threshold=86.0, + assert res.attrs["sun_zenith_threshold"] == 86.0 + assert res.attrs["units"] == "K" + assert res.attrs["platform_name"] == platform + assert res.attrs["sensor"] == sensor + assert res.attrs["name"] == chan_name + calculator.assert_called_with("NOAA-20", "viirs", "M12", sunz_threshold=86.0, masking_limit=NIRReflectance.MASKING_LIMIT) @@ -438,9 +438,9 @@ def _make_data_area(self): rows = 3 cols = 5 area = AreaDefinition( - 'some_area_name', 'On-the-fly area', 'geosabii', - {'a': '6378137.0', 'b': '6356752.31414', 'h': '35786023.0', 'lon_0': '-89.5', 'proj': 'geos', 'sweep': 'x', - 'units': 'm'}, + "some_area_name", "On-the-fly area", "geosabii", + {"a": "6378137.0", "b": "6356752.31414", "h": "35786023.0", "lon_0": "-89.5", "proj": "geos", "sweep": "x", + "units": "m"}, cols, rows, (-5434894.954752679, -5434894.964451744, 5434894.964451744, 5434894.954752679)) @@ -453,46 +453,46 @@ def _make_data_area(self): def _create_test_data(self, name, wavelength, resolution): area, dnb = self._make_data_area() input_band = xr.DataArray(dnb, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'platform_name': 'Himawari-8', - 'calibration': 'reflectance', 'units': '%', 'wavelength': wavelength, - 'name': name, 'resolution': resolution, 'sensor': 'ahi', - 'start_time': '2017-09-20 17:30:40.800000', - 'end_time': '2017-09-20 17:41:17.500000', - 'area': area, 'ancillary_variables': [], - 'orbital_parameters': { - 'satellite_nominal_longitude': -89.5, - 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_altitude': 35786023.4375, + "platform_name": "Himawari-8", + "calibration": "reflectance", "units": "%", "wavelength": wavelength, + "name": name, "resolution": resolution, "sensor": "ahi", + "start_time": "2017-09-20 17:30:40.800000", + "end_time": "2017-09-20 17:41:17.500000", + "area": area, "ancillary_variables": [], + "orbital_parameters": { + "satellite_nominal_longitude": -89.5, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_altitude": 35786023.4375, }, }) red_band = xr.DataArray(dnb, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'platform_name': 'Himawari-8', - 'calibration': 'reflectance', 'units': '%', 'wavelength': (0.62, 0.64, 0.66), - 'name': 'B03', 'resolution': 500, 'sensor': 'ahi', - 'start_time': '2017-09-20 17:30:40.800000', - 'end_time': '2017-09-20 17:41:17.500000', - 'area': area, 'ancillary_variables': [], - 'orbital_parameters': { - 'satellite_nominal_longitude': -89.5, 
- 'satellite_nominal_latitude': 0.0, - 'satellite_nominal_altitude': 35786023.4375, + "platform_name": "Himawari-8", + "calibration": "reflectance", "units": "%", "wavelength": (0.62, 0.64, 0.66), + "name": "B03", "resolution": 500, "sensor": "ahi", + "start_time": "2017-09-20 17:30:40.800000", + "end_time": "2017-09-20 17:41:17.500000", + "area": area, "ancillary_variables": [], + "orbital_parameters": { + "satellite_nominal_longitude": -89.5, + "satellite_nominal_latitude": 0.0, + "satellite_nominal_altitude": 35786023.4375, }, }) fake_angle_data = da.ones_like(dnb, dtype=np.float32) * 90.0 angle1 = xr.DataArray(fake_angle_data, - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'platform_name': 'Himawari-8', - 'calibration': 'reflectance', 'units': '%', 'wavelength': wavelength, - 'name': "satellite_azimuth_angle", 'resolution': resolution, 'sensor': 'ahi', - 'start_time': '2017-09-20 17:30:40.800000', - 'end_time': '2017-09-20 17:41:17.500000', - 'area': area, 'ancillary_variables': [], + "platform_name": "Himawari-8", + "calibration": "reflectance", "units": "%", "wavelength": wavelength, + "name": "satellite_azimuth_angle", "resolution": resolution, "sensor": "ahi", + "start_time": "2017-09-20 17:30:40.800000", + "end_time": "2017-09-20 17:41:17.500000", + "area": area, "ancillary_variables": [], }) return input_band, red_band, angle1, angle1, angle1, angle1 @@ -518,15 +518,15 @@ def test_rayleigh_corrector(self, name, wavelength, resolution, aerosol_type, re reduce_strength, exp_mean, exp_unique): """Test PSPRayleighReflectance with fake data.""" from satpy.modifiers.atmosphere import PSPRayleighReflectance - ray_cor = PSPRayleighReflectance(name=name, atmosphere='us-standard', aerosol_types=aerosol_type, + ray_cor = PSPRayleighReflectance(name=name, atmosphere="us-standard", aerosol_types=aerosol_type, reduce_lim_low=reduce_lim_low, reduce_lim_high=reduce_lim_high, reduce_strength=reduce_strength) - assert ray_cor.attrs['name'] == name - assert ray_cor.attrs['atmosphere'] == 'us-standard' - assert ray_cor.attrs['aerosol_types'] == aerosol_type - assert ray_cor.attrs['reduce_lim_low'] == reduce_lim_low - assert ray_cor.attrs['reduce_lim_high'] == reduce_lim_high - assert ray_cor.attrs['reduce_strength'] == reduce_strength + assert ray_cor.attrs["name"] == name + assert ray_cor.attrs["atmosphere"] == "us-standard" + assert ray_cor.attrs["aerosol_types"] == aerosol_type + assert ray_cor.attrs["reduce_lim_low"] == reduce_lim_low + assert ray_cor.attrs["reduce_lim_high"] == reduce_lim_high + assert ray_cor.attrs["reduce_strength"] == reduce_strength input_band, red_band, *_ = self._create_test_data(name, wavelength, resolution) res = ray_cor([input_band, red_band]) @@ -545,7 +545,7 @@ def test_rayleigh_with_angles(self, as_optionals): """Test PSPRayleighReflectance with angles provided.""" from satpy.modifiers.atmosphere import PSPRayleighReflectance aerosol_type = "rayleigh_only" - ray_cor = PSPRayleighReflectance(name="B01", atmosphere='us-standard', aerosol_types=aerosol_type) + ray_cor = PSPRayleighReflectance(name="B01", atmosphere="us-standard", aerosol_types=aerosol_type) prereqs, opt_prereqs = self._get_angles_prereqs_and_opts(as_optionals) with mock.patch("satpy.modifiers.atmosphere.get_angles") as get_angles: res = ray_cor(prereqs, opt_prereqs) @@ -596,15 +596,15 @@ def test_call(self): "nadir_latitude": 0.0, } band = xr.DataArray(da.zeros((5, 5)), - attrs={'area': area, - 'start_time': stime, - 'name': 'name', - 'platform_name': 'platform', - 'sensor': 'sensor', - 
'orbital_parameters': orb_params}, - dims=('y', 'x')) + attrs={"area": area, + "start_time": stime, + "name": "name", + "platform_name": "platform", + "sensor": "sensor", + "orbital_parameters": orb_params}, + dims=("y", "x")) # Perform atmospherical correction - psp = PSPAtmosphericalCorrection(name='dummy') + psp = PSPAtmosphericalCorrection(name="dummy") res = psp(projectables=[band]) res.compute() diff --git a/satpy/tests/test_node.py b/satpy/tests/test_node.py index 8a41082266..7475b04d24 100644 --- a/satpy/tests/test_node.py +++ b/satpy/tests/test_node.py @@ -62,7 +62,7 @@ class TestCompositorNode(unittest.TestCase): def setUp(self): """Set up the test case.""" - self.name = 'hej' + self.name = "hej" self.fake = FakeCompositor(self.name) self.c_node = CompositorNode(self.fake) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index db50900cad..8250f691a0 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -40,52 +40,52 @@ os.environ.pop("PPP_CONFIG_DIR", None) os.environ.pop("SATPY_CONFIG_PATH", None) -local_id_keys_config = {'name': { - 'required': True, +local_id_keys_config = {"name": { + "required": True, }, - 'wavelength': { - 'type': WavelengthRange, + "wavelength": { + "type": WavelengthRange, }, - 'resolution': None, - 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' + "resolution": None, + "calibration": { + "enum": [ + "reflectance", + "brightness_temperature", + "radiance", + "counts" ] }, - 'polarization': None, - 'level': None, - 'modifiers': { - 'required': True, - 'default': ModifierTuple(), - 'type': ModifierTuple, + "polarization": None, + "level": None, + "modifiers": { + "required": True, + "default": ModifierTuple(), + "type": ModifierTuple, }, } real_import = builtins.__import__ -@pytest.fixture +@pytest.fixture() def viirs_file(tmp_path, monkeypatch): """Create a dummy viirs file.""" - filename = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5' + filename = "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5" monkeypatch.chdir(tmp_path) # touch the file so it exists on disk - open(filename, 'w').close() + open(filename, "w").close() return filename -@pytest.fixture +@pytest.fixture() def atms_file(tmp_path, monkeypatch): """Create a dummy atms file.""" - filename = 'SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5' + filename = "SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5" monkeypatch.chdir(tmp_path) # touch the file so it exists on disk - open(filename, 'w').close() + open(filename, "w").close() return filename @@ -120,11 +120,11 @@ def setUp(self): calibration="reflectance", polarization="H"): "4refl", make_dataid(name="test5", - modifiers=('mod1', 'mod2')): "5_2mod", + modifiers=("mod1", "mod2")): "5_2mod", make_dataid(name="test5", - modifiers=('mod2',)): "5_1mod", - make_dataid(name='test6', level=100): '6_100', - make_dataid(name='test6', level=200): '6_200', + modifiers=("mod2",)): "5_1mod", + make_dataid(name="test6", level=100): "6_100", + make_dataid(name="test6", level=200): "6_200", } self.test_dict = DatasetDict(regular_dict) @@ -132,121 +132,115 @@ def test_init_noargs(self): """Test DatasetDict init with no arguments.""" from satpy import DatasetDict d = DatasetDict() - self.assertIsInstance(d, dict) + assert isinstance(d, dict) def test_init_dict(self): """Test DatasetDict init with a regular dict argument.""" from satpy import 
DatasetDict regular_dict = {make_dataid(name="test", wavelength=(0, 0.5, 1)): "1", } d = DatasetDict(regular_dict) - self.assertEqual(d, regular_dict) + assert d == regular_dict def test_getitem(self): """Test DatasetDict getitem with different arguments.""" from satpy.tests.utils import make_dsq d = self.test_dict # access by name - self.assertEqual(d["test"], "1") + assert d["test"] == "1" # access by exact wavelength - self.assertEqual(d[1.5], "2") + assert d[1.5] == "2" # access by near wavelength - self.assertEqual(d[1.55], "2") + assert d[1.55] == "2" # access by near wavelength of another dataset - self.assertEqual(d[1.65], "3") + assert d[1.65] == "3" # access by name with multiple levels - self.assertEqual(d['test6'], '6_100') + assert d["test6"] == "6_100" - self.assertEqual(d[make_dsq(wavelength=1.5)], "2") - self.assertEqual(d[make_dsq(wavelength=0.5, resolution=1000)], "1") - self.assertEqual(d[make_dsq(wavelength=0.5, resolution=500)], "1h") - self.assertEqual(d[make_dsq(name='test6', level=100)], '6_100') - self.assertEqual(d[make_dsq(name='test6', level=200)], '6_200') + assert d[make_dsq(wavelength=1.5)] == "2" + assert d[make_dsq(wavelength=0.5, resolution=1000)] == "1" + assert d[make_dsq(wavelength=0.5, resolution=500)] == "1h" + assert d[make_dsq(name="test6", level=100)] == "6_100" + assert d[make_dsq(name="test6", level=200)] == "6_200" # higher resolution is returned - self.assertEqual(d[0.5], "1h") - self.assertEqual(d['test4'], '4refl') - self.assertEqual(d[make_dataid(name='test4', calibration='radiance')], '4rad') - self.assertRaises(KeyError, d.getitem, '1h') + assert d[0.5] == "1h" + assert d["test4"] == "4refl" + assert d[make_dataid(name="test4", calibration="radiance")] == "4rad" + self.assertRaises(KeyError, d.getitem, "1h") # test with full tuple - self.assertEqual(d[make_dsq(name='test', wavelength=(0, 0.5, 1), resolution=1000)], "1") + assert d[make_dsq(name="test", wavelength=(0, 0.5, 1), resolution=1000)] == "1" def test_get_key(self): """Test 'get_key' special functions.""" from satpy.dataset import DataQuery d = self.test_dict - res1 = get_key(make_dataid(name='test4'), d, calibration='radiance') - res2 = get_key(make_dataid(name='test4'), d, calibration='radiance', + res1 = get_key(make_dataid(name="test4"), d, calibration="radiance") + res2 = get_key(make_dataid(name="test4"), d, calibration="radiance", num_results=0) - res3 = get_key(make_dataid(name='test4'), d, calibration='radiance', + res3 = get_key(make_dataid(name="test4"), d, calibration="radiance", num_results=3) - self.assertEqual(len(res2), 1) - self.assertEqual(len(res3), 1) + assert len(res2) == 1 + assert len(res3) == 1 res2 = res2[0] res3 = res3[0] - self.assertEqual(res1, res2) - self.assertEqual(res1, res3) - res1 = get_key('test4', d, query=DataQuery(polarization='V')) - self.assertEqual(res1, make_dataid(name='test4', calibration='radiance', - polarization='V')) + assert res1 == res2 + assert res1 == res3 + res1 = get_key("test4", d, query=DataQuery(polarization="V")) + assert res1 == make_dataid(name="test4", calibration="radiance", polarization="V") res1 = get_key(0.5, d, query=DataQuery(resolution=500)) - self.assertEqual(res1, make_dataid(name='testh', - wavelength=(0, 0.5, 1), - resolution=500)) - - res1 = get_key('test6', d, query=DataQuery(level=100)) - self.assertEqual(res1, make_dataid(name='test6', - level=100)) - - res1 = get_key('test5', d) - res2 = get_key('test5', d, query=DataQuery(modifiers=('mod2',))) - res3 = get_key('test5', d, 
query=DataQuery(modifiers=('mod1', 'mod2',))) - self.assertEqual(res1, make_dataid(name='test5', - modifiers=('mod2',))) - self.assertEqual(res1, res2) - self.assertNotEqual(res1, res3) + assert res1 == make_dataid(name="testh", wavelength=(0, 0.5, 1), resolution=500) + + res1 = get_key("test6", d, query=DataQuery(level=100)) + assert res1 == make_dataid(name="test6", level=100) + + res1 = get_key("test5", d) + res2 = get_key("test5", d, query=DataQuery(modifiers=("mod2",))) + res3 = get_key("test5", d, query=DataQuery(modifiers=("mod1", "mod2",))) + assert res1 == make_dataid(name="test5", modifiers=("mod2",)) + assert res1 == res2 + assert res1 != res3 # more than 1 result when default is to ask for 1 result - self.assertRaises(KeyError, get_key, 'test4', d, best=False) + self.assertRaises(KeyError, get_key, "test4", d, best=False) def test_contains(self): """Test DatasetDict contains method.""" d = self.test_dict - self.assertIn('test', d) - self.assertFalse(d.contains('test')) - self.assertNotIn('test_bad', d) - self.assertIn(0.5, d) - self.assertFalse(d.contains(0.5)) - self.assertIn(1.5, d) - self.assertIn(1.55, d) - self.assertIn(1.65, d) - self.assertIn(make_dataid(name='test4', calibration='radiance'), d) - self.assertIn('test4', d) + assert "test" in d + assert not d.contains("test") + assert "test_bad" not in d + assert 0.5 in d + assert not d.contains(0.5) + assert 1.5 in d + assert 1.55 in d + assert 1.65 in d + assert make_dataid(name="test4", calibration="radiance") in d + assert "test4" in d def test_keys(self): """Test keys method of DatasetDict.""" from satpy.tests.utils import DataID d = self.test_dict - self.assertEqual(len(d.keys()), len(self.regular_dict.keys())) - self.assertTrue(all(isinstance(x, DataID) for x in d.keys())) + assert len(d.keys()) == len(self.regular_dict.keys()) + assert all(isinstance(x, DataID) for x in d.keys()) name_keys = d.keys(names=True) - self.assertListEqual(sorted(set(name_keys))[:4], [ - 'test', 'test2', 'test3', 'test4']) + assert sorted(set(name_keys))[:4] == ["test", "test2", "test3", "test4"] wl_keys = tuple(d.keys(wavelengths=True)) - self.assertIn((0, 0.5, 1), wl_keys) - self.assertIn((1, 1.5, 2, 'µm'), wl_keys) - self.assertIn((1.2, 1.7, 2.2, 'µm'), wl_keys) - self.assertIn(None, wl_keys) + assert (0, 0.5, 1) in wl_keys + assert (1, 1.5, 2, "µm") in wl_keys + assert (1.2, 1.7, 2.2, "µm") in wl_keys + assert None in wl_keys def test_setitem(self): """Test setitem method of DatasetDict.""" d = self.test_dict - d['new_ds'] = {'metadata': 'new_ds'} - self.assertEqual(d['new_ds']['metadata'], 'new_ds') - d[0.5] = {'calibration': 'radiance'} - self.assertEqual(d[0.5]['resolution'], 500) - self.assertEqual(d[0.5]['name'], 'testh') + d["new_ds"] = {"metadata": "new_ds"} + assert d["new_ds"]["metadata"] == "new_ds" + d[0.5] = {"calibration": "radiance"} + assert d[0.5]["resolution"] == 500 + assert d[0.5]["name"] == "testh" class TestReaderLoader(unittest.TestCase): @@ -261,7 +255,7 @@ def setUp(self): from satpy.tests.reader_tests.test_viirs_sdr import FakeHDF5FileHandler2 # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(VIIRSSDRFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -276,26 +270,26 @@ def test_no_args(self): """ from satpy.readers import load_readers ri = load_readers() - self.assertDictEqual(ri, {}) + 
assert ri == {} def test_filenames_only(self): """Test with filenames specified.""" from satpy.readers import load_readers - ri = load_readers(filenames=['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5']) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) + ri = load_readers(filenames=["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"]) + assert list(ri.keys()) == ["viirs_sdr"] def test_filenames_and_reader(self): """Test with filenames and reader specified.""" from satpy.readers import load_readers - ri = load_readers(reader='viirs_sdr', - filenames=['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5']) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) + ri = load_readers(reader="viirs_sdr", + filenames=["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"]) + assert list(ri.keys()) == ["viirs_sdr"] def test_bad_reader_name_with_filenames(self): """Test bad reader name with filenames provided.""" from satpy.readers import load_readers - self.assertRaises(ValueError, load_readers, reader='i_dont_exist', filenames=[ - 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5', + self.assertRaises(ValueError, load_readers, reader="i_dont_exist", filenames=[ + "SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5", ]) def test_filenames_as_path(self): @@ -304,28 +298,28 @@ def test_filenames_as_path(self): from satpy.readers import load_readers ri = load_readers(filenames=[ - Path('SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'), + Path("SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"), ]) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) + assert list(ri.keys()) == ["viirs_sdr"] def test_filenames_as_dict(self): """Test loading readers where filenames are organized by reader.""" from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } ri = load_readers(filenames=filenames) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) + assert list(ri.keys()) == ["viirs_sdr"] def test_filenames_as_dict_bad_reader(self): """Test loading with filenames dict but one of the readers is bad.""" from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], - '__fake__': ['fake.txt'], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], + "__fake__": ["fake.txt"], } self.assertRaisesRegex(ValueError, - r'(?=.*__fake__)(?!.*viirs)(^No reader.+)', + r"(?=.*__fake__)(?!.*viirs)(^No reader.+)", load_readers, filenames=filenames) def test_filenames_as_dict_with_reader(self): @@ -337,63 +331,63 @@ def test_filenames_as_dict_with_reader(self): """ from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } - ri = load_readers(reader='viirs_sdr', filenames=filenames) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) + ri = load_readers(reader="viirs_sdr", filenames=filenames) + assert list(ri.keys()) == ["viirs_sdr"] def 
test_empty_filenames_as_dict(self): """Test passing filenames as a dictionary with an empty list of filenames.""" # only one reader from satpy.readers import load_readers filenames = { - 'viirs_sdr': [], + "viirs_sdr": [], } self.assertRaises(ValueError, load_readers, filenames=filenames) # two readers, one is empty filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], - 'viirs_l1b': [], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], + "viirs_l1b": [], } ri = load_readers(filenames) - self.assertListEqual(list(ri.keys()), ['viirs_sdr']) - - @mock.patch('satpy.readers.hrit_base.HRITFileHandler._get_hd') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler._get_header') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.start_time') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.end_time') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue') - @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue') + assert list(ri.keys()) == ["viirs_sdr"] + + @mock.patch("satpy.readers.hrit_base.HRITFileHandler._get_hd") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler._get_header") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.start_time") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.end_time") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler.read_prologue") + @mock.patch("satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler.read_epilogue") def test_missing_requirements(self, *mocks): """Test warnings and exceptions in case of missing requirements.""" from satpy.readers import load_readers # Filenames from a single scan - epi_pro_miss = ['H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__'] - epi_miss = epi_pro_miss + ['H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__'] - pro_miss = epi_pro_miss + ['H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__'] + epi_pro_miss = ["H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__"] + epi_miss = epi_pro_miss + ["H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__"] + pro_miss = epi_pro_miss + ["H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__"] with warnings.catch_warnings(): warnings.filterwarnings("ignore", message=r"No handler for reading requirement.*", category=UserWarning) for filenames in [epi_miss, pro_miss, epi_pro_miss]: - self.assertRaises(ValueError, load_readers, reader='seviri_l1b_hrit', filenames=filenames) + self.assertRaises(ValueError, load_readers, reader="seviri_l1b_hrit", filenames=filenames) # Filenames from multiple scans at_least_one_complete = [ # 09:00 scan is ok - 'H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__', - 'H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__', - 'H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__', + "H-000-MSG4__-MSG4________-IR_108___-000006___-201809050900-__", + "H-000-MSG4__-MSG4________-_________-PRO______-201809050900-__", + "H-000-MSG4__-MSG4________-_________-EPI______-201809050900-__", # 10:00 scan is incomplete - 'H-000-MSG4__-MSG4________-IR_108___-000006___-201809051000-__', + "H-000-MSG4__-MSG4________-IR_108___-000006___-201809051000-__", ] with warnings.catch_warnings(): warnings.filterwarnings("ignore", message=r"No matching requirement file.*", category=UserWarning) try: - 
load_readers(filenames=at_least_one_complete, reader='seviri_l1b_hrit') + load_readers(filenames=at_least_one_complete, reader="seviri_l1b_hrit") except ValueError: - self.fail('If at least one set of filenames is complete, no ' - 'exception should be raised') + self.fail("If at least one set of filenames is complete, no " + "exception should be raised") def test_all_filtered(self): """Test behaviour if no file matches the filter parameters.""" @@ -401,13 +395,13 @@ def test_all_filtered(self): from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], } - filter_params = {'start_time': datetime.datetime(1970, 1, 1), - 'end_time': datetime.datetime(1970, 1, 2), - 'area': None} + filter_params = {"start_time": datetime.datetime(1970, 1, 1), + "end_time": datetime.datetime(1970, 1, 2), + "area": None} self.assertRaises(ValueError, load_readers, - filenames=filenames, reader_kwargs={'filter_parameters': filter_params}) + filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_all_filtered_multiple(self): """Test behaviour if no file matches the filter parameters.""" @@ -415,13 +409,13 @@ def test_all_filtered_multiple(self): from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], - 'abi_l1b': ['OR_ABI-L1b-RadF-M3C01_G16_s20120561730408_e20120561741175_c20172631741218.nc'], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], + "abi_l1b": ["OR_ABI-L1b-RadF-M3C01_G16_s20120561730408_e20120561741175_c20172631741218.nc"], } - filter_params = {'start_time': datetime.datetime(1970, 1, 1), - 'end_time': datetime.datetime(1970, 1, 2)} + filter_params = {"start_time": datetime.datetime(1970, 1, 1), + "end_time": datetime.datetime(1970, 1, 2)} self.assertRaises(ValueError, load_readers, - filenames=filenames, reader_kwargs={'filter_parameters': filter_params}) + filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) def test_almost_all_filtered(self): """Test behaviour if only one reader has datasets.""" @@ -429,17 +423,17 @@ def test_almost_all_filtered(self): from satpy.readers import load_readers filenames = { - 'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'], - 'abi_l1b': ['OR_ABI-L1b-RadF-M3C01_G16_s20172631730408_e20172631741175_c20172631741218.nc'], + "viirs_sdr": ["SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5"], + "abi_l1b": ["OR_ABI-L1b-RadF-M3C01_G16_s20172631730408_e20172631741175_c20172631741218.nc"], } - filter_params = {'start_time': datetime.datetime(2012, 2, 25), - 'end_time': datetime.datetime(2012, 2, 26)} + filter_params = {"start_time": datetime.datetime(2012, 2, 25), + "end_time": datetime.datetime(2012, 2, 26)} # viirs has data that matches the request, abi doesn't - readers = load_readers(filenames=filenames, reader_kwargs={'filter_parameters': filter_params}) - self.assertIn('viirs_sdr', readers) + readers = load_readers(filenames=filenames, reader_kwargs={"filter_parameters": filter_params}) + assert "viirs_sdr" in readers # abi_l1b reader was created, but no datasets available - self.assertIn('abi_l1b', readers) - self.assertEqual(len(list(readers['abi_l1b'].available_dataset_ids)), 0) + assert "abi_l1b" in readers + assert 
len(list(readers["abi_l1b"].available_dataset_ids)) == 0 class TestFindFilesAndReaders: @@ -451,7 +445,7 @@ def setup_method(self): from satpy.tests.reader_tests.test_viirs_sdr import FakeHDF5FileHandler2 # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.p = mock.patch.object(VIIRSSDRFileHandler, "__bases__", (FakeHDF5FileHandler2,)) self.fake_handler = self.p.start() self.p.is_local = True @@ -461,31 +455,31 @@ def teardown_method(self): def test_reader_name(self, viirs_file): """Test with default base_dir and reader specified.""" - ri = find_files_and_readers(reader='viirs_sdr') - assert list(ri.keys()) == ['viirs_sdr'] - assert ri['viirs_sdr'] == [viirs_file] + ri = find_files_and_readers(reader="viirs_sdr") + assert list(ri.keys()) == ["viirs_sdr"] + assert ri["viirs_sdr"] == [viirs_file] def test_reader_other_name(self, monkeypatch, tmp_path): """Test with default base_dir and reader specified.""" - filename = 'S_NWC_CPP_npp_32505_20180204T1114116Z_20180204T1128227Z.nc' + filename = "S_NWC_CPP_npp_32505_20180204T1114116Z_20180204T1128227Z.nc" monkeypatch.chdir(tmp_path) # touch the file so it exists on disk - open(filename, 'w').close() + open(filename, "w").close() - ri = find_files_and_readers(reader='nwcsaf-pps_nc') - assert list(ri.keys()) == ['nwcsaf-pps_nc'] - assert ri['nwcsaf-pps_nc'] == [filename] + ri = find_files_and_readers(reader="nwcsaf-pps_nc") + assert list(ri.keys()) == ["nwcsaf-pps_nc"] + assert ri["nwcsaf-pps_nc"] == [filename] def test_reader_name_matched_start_end_time(self, viirs_file): """Test with start and end time matching the filename.""" from datetime import datetime - ri = find_files_and_readers(reader='viirs_sdr', + ri = find_files_and_readers(reader="viirs_sdr", start_time=datetime(2012, 2, 25, 18, 0, 0), end_time=datetime(2012, 2, 25, 19, 0, 0), ) - assert list(ri.keys()) == ['viirs_sdr'] - assert ri['viirs_sdr'] == [viirs_file] + assert list(ri.keys()) == ["viirs_sdr"] + assert ri["viirs_sdr"] == [viirs_file] def test_reader_name_matched_start_time(self, viirs_file): """Test with start matching the filename. @@ -494,9 +488,9 @@ def test_reader_name_matched_start_time(self, viirs_file): """ from datetime import datetime - ri = find_files_and_readers(reader='viirs_sdr', start_time=datetime(2012, 2, 25, 18, 1, 30)) - assert list(ri.keys()) == ['viirs_sdr'] - assert ri['viirs_sdr'] == [viirs_file] + ri = find_files_and_readers(reader="viirs_sdr", start_time=datetime(2012, 2, 25, 18, 1, 30)) + assert list(ri.keys()) == ["viirs_sdr"] + assert ri["viirs_sdr"] == [viirs_file] def test_reader_name_matched_end_time(self, viirs_file): """Test with end matching the filename. 
@@ -506,16 +500,16 @@ def test_reader_name_matched_end_time(self, viirs_file): """ from datetime import datetime - ri = find_files_and_readers(reader='viirs_sdr', end_time=datetime(2012, 2, 25, 18, 1, 30)) - assert list(ri.keys()) == ['viirs_sdr'] - assert ri['viirs_sdr'] == [viirs_file] + ri = find_files_and_readers(reader="viirs_sdr", end_time=datetime(2012, 2, 25, 18, 1, 30)) + assert list(ri.keys()) == ["viirs_sdr"] + assert ri["viirs_sdr"] == [viirs_file] def test_reader_name_unmatched_start_end_time(self, viirs_file): """Test with start and end time matching the filename.""" from datetime import datetime - with pytest.raises(ValueError): - find_files_and_readers(reader='viirs_sdr', + with pytest.raises(ValueError, match="No supported files found"): + find_files_and_readers(reader="viirs_sdr", start_time=datetime(2012, 2, 26, 18, 0, 0), end_time=datetime(2012, 2, 26, 19, 0, 0)) @@ -524,8 +518,8 @@ def test_no_parameters(self, viirs_file): from satpy.readers import find_files_and_readers ri = find_files_and_readers() - assert list(ri.keys()) == ['viirs_sdr'] - assert ri['viirs_sdr'] == [viirs_file] + assert list(ri.keys()) == ["viirs_sdr"] + assert ri["viirs_sdr"] == [viirs_file] def test_no_parameters_both_atms_and_viirs(self, viirs_file, atms_file): """Test with no limiting parameters when there area both atms and viirs files in the same directory.""" @@ -533,31 +527,31 @@ def test_no_parameters_both_atms_and_viirs(self, viirs_file, atms_file): ri = find_files_and_readers() - assert 'atms_sdr_hdf5' in list(ri.keys()) - assert 'viirs_sdr' in list(ri.keys()) - assert ri['atms_sdr_hdf5'] == [atms_file] - assert ri['viirs_sdr'] == [viirs_file] + assert "atms_sdr_hdf5" in list(ri.keys()) + assert "viirs_sdr" in list(ri.keys()) + assert ri["atms_sdr_hdf5"] == [atms_file] + assert ri["viirs_sdr"] == [viirs_file] def test_bad_sensor(self): """Test bad sensor doesn't find any files.""" - with pytest.raises(ValueError): - find_files_and_readers(sensor='i_dont_exist') + with pytest.raises(ValueError, match="Sensor.* not supported by any readers"): + find_files_and_readers(sensor="i_dont_exist") def test_sensor(self, viirs_file): """Test that readers for the current sensor are loaded.""" # we can't easily know how many readers satpy has that support # 'viirs' so we just pass it and hope that this works - ri = find_files_and_readers(sensor='viirs') - assert list(ri.keys()) == ['viirs_sdr'] - assert ri['viirs_sdr'] == [viirs_file] + ri = find_files_and_readers(sensor="viirs") + assert list(ri.keys()) == ["viirs_sdr"] + assert ri["viirs_sdr"] == [viirs_file] def test_sensor_no_files(self): """Test that readers for the current sensor are loaded.""" # we can't easily know how many readers satpy has that support # 'viirs' so we just pass it and hope that this works - with pytest.raises(ValueError): - find_files_and_readers(sensor='viirs') - assert find_files_and_readers(sensor='viirs', missing_ok=True) == {} + with pytest.raises(ValueError, match="No supported files found"): + find_files_and_readers(sensor="viirs") + assert find_files_and_readers(sensor="viirs", missing_ok=True) == {} def test_reader_load_failed(self): """Test that an exception is raised when a reader can't be loaded.""" @@ -566,10 +560,10 @@ def test_reader_load_failed(self): from satpy.readers import find_files_and_readers # touch the file so it exists on disk - with mock.patch('yaml.load') as load: + with mock.patch("yaml.load") as load: load.side_effect = yaml.YAMLError("Import problems") with pytest.raises(yaml.YAMLError): - 
find_files_and_readers(reader='viirs_sdr') + find_files_and_readers(reader="viirs_sdr") def test_pending_old_reader_name_mapping(self): """Test that requesting pending old reader names raises a warning.""" @@ -589,7 +583,7 @@ def test_old_reader_name_mapping(self): return pytest.skip("Skipping deprecated reader tests because " "no deprecated readers.") test_reader = sorted(OLD_READER_NAMES.keys())[0] - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Reader name .* has been deprecated, use .* instead."): get_valid_reader_names([test_reader]) @@ -602,17 +596,17 @@ def test_filename_matches_reader_name(self): class IgnoreLoader(yaml.SafeLoader): def _ignore_all_tags(self, tag_suffix, node): - return tag_suffix + ' ' + node.value - IgnoreLoader.add_multi_constructor('', IgnoreLoader._ignore_all_tags) + return tag_suffix + " " + node.value + IgnoreLoader.add_multi_constructor("", IgnoreLoader._ignore_all_tags) from satpy._config import glob_config from satpy.readers import read_reader_config - for reader_config in glob_config('readers/*.yaml'): + for reader_config in glob_config("readers/*.yaml"): reader_fn = os.path.basename(reader_config) reader_fn_name = os.path.splitext(reader_fn)[0] reader_info = read_reader_config([reader_config], loader=IgnoreLoader) - assert reader_fn_name == reader_info['name'], \ + assert reader_fn_name == reader_info["name"], \ "Reader YAML filename doesn't match reader name in the YAML file." def test_available_readers(self): @@ -622,16 +616,16 @@ def test_available_readers(self): reader_names = available_readers() assert len(reader_names) > 0 assert isinstance(reader_names[0], str) - assert 'viirs_sdr' in reader_names # needs h5py - assert 'abi_l1b' in reader_names # needs netcdf4 + assert "viirs_sdr" in reader_names # needs h5py + assert "abi_l1b" in reader_names # needs netcdf4 assert reader_names == sorted(reader_names) reader_infos = available_readers(as_dict=True) assert len(reader_names) == len(reader_infos) assert isinstance(reader_infos[0], dict) for reader_info in reader_infos: - assert 'name' in reader_info - assert reader_infos == sorted(reader_infos, key=lambda reader_info: reader_info['name']) + assert "name" in reader_info + assert reader_infos == sorted(reader_infos, key=lambda reader_info: reader_info["name"]) def test_available_readers_base_loader(self, monkeypatch): """Test the 'available_readers' function for yaml loader type BaseLoader.""" @@ -641,20 +635,20 @@ def test_available_readers_base_loader(self, monkeypatch): from satpy._config import glob_config def patched_import_error(name, globals=None, locals=None, fromlist=(), level=0): - if name in ('netcdf4', ): + if name in ("netcdf4", ): raise ImportError(f"Mocked import error {name}") return real_import(name, globals=globals, locals=locals, fromlist=fromlist, level=level) - monkeypatch.delitem(sys.modules, 'netcdf4', raising=False) - monkeypatch.setattr(builtins, '__import__', patched_import_error) + monkeypatch.delitem(sys.modules, "netcdf4", raising=False) + monkeypatch.setattr(builtins, "__import__", patched_import_error) with pytest.raises(ImportError): import netcdf4 # noqa: F401 reader_names = available_readers(yaml_loader=yaml.BaseLoader) - assert 'abi_l1b' in reader_names # needs netcdf4 - assert 'viirs_l1b' in reader_names - assert len(reader_names) == len(list(glob_config('readers/*.yaml'))) + assert "abi_l1b" in reader_names # needs netcdf4 + assert "viirs_l1b" in reader_names + assert len(reader_names) == len(list(glob_config("readers/*.yaml"))) class 
TestGroupFiles(unittest.TestCase): @@ -677,7 +671,7 @@ def setUp(self): "OR_ABI-L1b-RadC-M3C02_G16_s20171171527203_e20171171529576_c20171171530008.nc", ] self.g16_files = input_files - self.g17_files = [x.replace('G16', 'G17') for x in input_files] + self.g17_files = [x.replace("G16", "G17") for x in input_files] self.noaa20_files = [ "GITCO_j01_d20180511_t2027292_e2028538_b02476_c20190530192858056873_noac_ops.h5", "GITCO_j01_d20180511_t2028550_e2030195_b02476_c20190530192932937427_noac_ops.h5", @@ -714,12 +708,12 @@ def test_no_reader(self): # without files it's going to be an empty result assert group_files([]) == [] groups = group_files(self.g16_files) - self.assertEqual(6, len(groups)) + assert 6 == len(groups) def test_unknown_files(self): """Test that error is raised on unknown files.""" from satpy.readers import group_files - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="No matching readers found for these files: .*"): group_files(self.unknown_files, "abi_l1b") def test_bad_reader(self): @@ -729,41 +723,41 @@ def test_bad_reader(self): from satpy.readers import group_files # touch the file so it exists on disk - with mock.patch('yaml.load') as load: + with mock.patch("yaml.load") as load: load.side_effect = yaml.YAMLError("Import problems") - self.assertRaises(yaml.YAMLError, group_files, [], reader='abi_l1b') + self.assertRaises(yaml.YAMLError, group_files, [], reader="abi_l1b") def test_default_behavior(self): """Test the default behavior with the 'abi_l1b' reader.""" from satpy.readers import group_files - groups = group_files(self.g16_files, reader='abi_l1b') - self.assertEqual(6, len(groups)) - self.assertEqual(2, len(groups[0]['abi_l1b'])) + groups = group_files(self.g16_files, reader="abi_l1b") + assert 6 == len(groups) + assert 2 == len(groups[0]["abi_l1b"]) def test_default_behavior_set(self): """Test the default behavior with the 'abi_l1b' reader.""" from satpy.readers import group_files files = set(self.g16_files) num_files = len(files) - groups = group_files(files, reader='abi_l1b') + groups = group_files(files, reader="abi_l1b") # we didn't modify it - self.assertEqual(len(files), num_files) - self.assertEqual(6, len(groups)) - self.assertEqual(2, len(groups[0]['abi_l1b'])) + assert len(files) == num_files + assert 6 == len(groups) + assert 2 == len(groups[0]["abi_l1b"]) def test_non_datetime_group_key(self): """Test what happens when the start_time isn't used for grouping.""" from satpy.readers import group_files - groups = group_files(self.g16_files, reader='abi_l1b', group_keys=('platform_shortname',)) - self.assertEqual(1, len(groups)) - self.assertEqual(12, len(groups[0]['abi_l1b'])) + groups = group_files(self.g16_files, reader="abi_l1b", group_keys=("platform_shortname",)) + assert 1 == len(groups) + assert 12 == len(groups[0]["abi_l1b"]) def test_large_time_threshold(self): """Test what happens when the time threshold holds multiple files.""" from satpy.readers import group_files - groups = group_files(self.g16_files, reader='abi_l1b', time_threshold=60*8) - self.assertEqual(3, len(groups)) - self.assertEqual(4, len(groups[0]['abi_l1b'])) + groups = group_files(self.g16_files, reader="abi_l1b", time_threshold=60*8) + assert 3 == len(groups) + assert 4 == len(groups[0]["abi_l1b"]) def test_two_instruments_files(self): """Test the behavior when two instruments files are provided. 
@@ -776,9 +770,9 @@ def test_two_instruments_files(self): """ from satpy.readers import group_files - groups = group_files(self.g16_files + self.g17_files, reader='abi_l1b', group_keys=('start_time',)) - self.assertEqual(6, len(groups)) - self.assertEqual(4, len(groups[0]['abi_l1b'])) + groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b", group_keys=("start_time",)) + assert 6 == len(groups) + assert 4 == len(groups[0]["abi_l1b"]) def test_two_instruments_files_split(self): """Test the default behavior when two instruments files are provided and split. @@ -788,51 +782,51 @@ def test_two_instruments_files_split(self): """ from satpy.readers import group_files - groups = group_files(self.g16_files + self.g17_files, reader='abi_l1b', - group_keys=('start_time', 'platform_shortname')) - self.assertEqual(12, len(groups)) - self.assertEqual(2, len(groups[0]['abi_l1b'])) + groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b", + group_keys=("start_time", "platform_shortname")) + assert 12 == len(groups) + assert 2 == len(groups[0]["abi_l1b"]) # default for abi_l1b should also behave like this - groups = group_files(self.g16_files + self.g17_files, reader='abi_l1b') - self.assertEqual(12, len(groups)) - self.assertEqual(2, len(groups[0]['abi_l1b'])) + groups = group_files(self.g16_files + self.g17_files, reader="abi_l1b") + assert 12 == len(groups) + assert 2 == len(groups[0]["abi_l1b"]) def test_viirs_orbits(self): """Test a reader that doesn't use 'start_time' for default grouping.""" from satpy.readers import group_files - groups = group_files(self.noaa20_files + self.npp_files, reader='viirs_sdr') - self.assertEqual(2, len(groups)) + groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr") + assert 2 == len(groups) # the noaa-20 files will be first because the orbit number is smaller # 5 granules * 3 file types - self.assertEqual(5 * 3, len(groups[0]['viirs_sdr'])) + assert 5 * 3 == len(groups[0]["viirs_sdr"]) # 3 granules * 2 file types - self.assertEqual(6, len(groups[1]['viirs_sdr'])) + assert 6 == len(groups[1]["viirs_sdr"]) def test_viirs_override_keys(self): """Test overriding a group keys to add 'start_time'.""" from satpy.readers import group_files - groups = group_files(self.noaa20_files + self.npp_files, reader='viirs_sdr', - group_keys=('start_time', 'orbit', 'platform_shortname')) - self.assertEqual(8, len(groups)) - self.assertEqual(2, len(groups[0]['viirs_sdr'])) # NPP - self.assertEqual(2, len(groups[1]['viirs_sdr'])) # NPP - self.assertEqual(2, len(groups[2]['viirs_sdr'])) # NPP - self.assertEqual(3, len(groups[3]['viirs_sdr'])) # N20 - self.assertEqual(3, len(groups[4]['viirs_sdr'])) # N20 - self.assertEqual(3, len(groups[5]['viirs_sdr'])) # N20 - self.assertEqual(3, len(groups[6]['viirs_sdr'])) # N20 - self.assertEqual(3, len(groups[7]['viirs_sdr'])) # N20 + groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr", + group_keys=("start_time", "orbit", "platform_shortname")) + assert 8 == len(groups) + assert 2 == len(groups[0]["viirs_sdr"]) # NPP + assert 2 == len(groups[1]["viirs_sdr"]) # NPP + assert 2 == len(groups[2]["viirs_sdr"]) # NPP + assert 3 == len(groups[3]["viirs_sdr"]) # N20 + assert 3 == len(groups[4]["viirs_sdr"]) # N20 + assert 3 == len(groups[5]["viirs_sdr"]) # N20 + assert 3 == len(groups[6]["viirs_sdr"]) # N20 + assert 3 == len(groups[7]["viirs_sdr"]) # N20 # Ask for a larger time span with our groups - groups = group_files(self.noaa20_files + self.npp_files, reader='viirs_sdr', + 
groups = group_files(self.noaa20_files + self.npp_files, reader="viirs_sdr", time_threshold=60 * 60 * 2, - group_keys=('start_time', 'orbit', 'platform_shortname')) - self.assertEqual(2, len(groups)) + group_keys=("start_time", "orbit", "platform_shortname")) + assert 2 == len(groups) # NPP is first because it has an earlier time # 3 granules * 2 file types - self.assertEqual(6, len(groups[0]['viirs_sdr'])) + assert 6 == len(groups[0]["viirs_sdr"]) # 5 granules * 3 file types - self.assertEqual(5 * 3, len(groups[1]['viirs_sdr'])) + assert 5 * 3 == len(groups[1]["viirs_sdr"]) def test_multi_readers(self): """Test passing multiple readers.""" @@ -930,7 +924,7 @@ def test_multi_readers_empty_groups_passed(self): def test_multi_readers_invalid_parameter(self): """Verify that invalid missing parameter raises ValueError.""" from satpy.readers import group_files - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Invalid value for ``missing`` argument..*"): group_files( self._filenames_abi_glm, reader=["abi_l1b", "glm_l2"], @@ -946,14 +940,14 @@ def _generate_random_string(): def _assert_is_open_file_and_close(opened): try: - assert hasattr(opened, 'tell') + assert hasattr(opened, "tell") finally: opened.close() def _posixify_path(filename): drive, driveless_name = os.path.splitdrive(filename) - return driveless_name.replace('\\', '/') + return driveless_name.replace("\\", "/") class TestFSFile(unittest.TestCase): @@ -975,7 +969,7 @@ def setUp(self): self.local_filename2 = os.path.join(tempfile.gettempdir(), self.random_string2) Path(self.local_filename2).touch() self.zip_name = os.path.join(tempfile.gettempdir(), self.random_string2 + ".zip") - zip_file = zipfile.ZipFile(self.zip_name, 'w', zipfile.ZIP_DEFLATED) + zip_file = zipfile.ZipFile(self.zip_name, "w", zipfile.ZIP_DEFLATED) zip_file.write(self.local_filename2) zip_file.close() os.remove(self.local_filename2) @@ -1057,7 +1051,7 @@ def test_sorting_fsfiles(self): file2 = FSFile(self.local_filename) - extra_file = os.path.normpath('/somedir/bla') + extra_file = os.path.normpath("/somedir/bla") sorted_filenames = [os.fspath(file) for file in sorted([file1, file2, extra_file])] expected_filenames = sorted([extra_file, os.fspath(file1), os.fspath(file2)]) assert sorted_filenames == expected_filenames diff --git a/satpy/tests/test_regressions.py b/satpy/tests/test_regressions.py index f85d9c37be..1f0a4924f8 100644 --- a/satpy/tests/test_regressions.py +++ b/satpy/tests/test_regressions.py @@ -26,22 +26,22 @@ from satpy.tests.utils import make_dataid -abi_file_list = ['/data/OR_ABI-L1b-RadF-M3C01_G16_s20180722030423_e20180722041189_c20180722041235-118900_0.nc', - '/data/OR_ABI-L1b-RadF-M3C02_G16_s20180722030423_e20180722041190_c20180722041228-120000_0.nc', - '/data/OR_ABI-L1b-RadF-M3C03_G16_s20180722030423_e20180722041190_c20180722041237-119000_0.nc', - '/data/OR_ABI-L1b-RadF-M3C04_G16_s20180722030423_e20180722041189_c20180722041221.nc', - '/data/OR_ABI-L1b-RadF-M3C05_G16_s20180722030423_e20180722041190_c20180722041237-119101_0.nc', - '/data/OR_ABI-L1b-RadF-M3C06_G16_s20180722030423_e20180722041195_c20180722041227.nc', - '/data/OR_ABI-L1b-RadF-M3C07_G16_s20180722030423_e20180722041201_c20180722041238.nc', - '/data/OR_ABI-L1b-RadF-M3C08_G16_s20180722030423_e20180722041190_c20180722041238.nc', - '/data/OR_ABI-L1b-RadF-M3C09_G16_s20180722030423_e20180722041195_c20180722041256.nc', - '/data/OR_ABI-L1b-RadF-M3C10_G16_s20180722030423_e20180722041201_c20180722041250.nc', - 
'/data/OR_ABI-L1b-RadF-M3C11_G16_s20180722030423_e20180722041189_c20180722041254.nc', - '/data/OR_ABI-L1b-RadF-M3C12_G16_s20180722030423_e20180722041195_c20180722041256.nc', - '/data/OR_ABI-L1b-RadF-M3C13_G16_s20180722030423_e20180722041201_c20180722041259.nc', - '/data/OR_ABI-L1b-RadF-M3C14_G16_s20180722030423_e20180722041190_c20180722041258.nc', - '/data/OR_ABI-L1b-RadF-M3C15_G16_s20180722030423_e20180722041195_c20180722041259.nc', - '/data/OR_ABI-L1b-RadF-M3C16_G16_s20180722030423_e20180722041202_c20180722041259.nc'] +abi_file_list = ["/data/OR_ABI-L1b-RadF-M3C01_G16_s20180722030423_e20180722041189_c20180722041235-118900_0.nc", + "/data/OR_ABI-L1b-RadF-M3C02_G16_s20180722030423_e20180722041190_c20180722041228-120000_0.nc", + "/data/OR_ABI-L1b-RadF-M3C03_G16_s20180722030423_e20180722041190_c20180722041237-119000_0.nc", + "/data/OR_ABI-L1b-RadF-M3C04_G16_s20180722030423_e20180722041189_c20180722041221.nc", + "/data/OR_ABI-L1b-RadF-M3C05_G16_s20180722030423_e20180722041190_c20180722041237-119101_0.nc", + "/data/OR_ABI-L1b-RadF-M3C06_G16_s20180722030423_e20180722041195_c20180722041227.nc", + "/data/OR_ABI-L1b-RadF-M3C07_G16_s20180722030423_e20180722041201_c20180722041238.nc", + "/data/OR_ABI-L1b-RadF-M3C08_G16_s20180722030423_e20180722041190_c20180722041238.nc", + "/data/OR_ABI-L1b-RadF-M3C09_G16_s20180722030423_e20180722041195_c20180722041256.nc", + "/data/OR_ABI-L1b-RadF-M3C10_G16_s20180722030423_e20180722041201_c20180722041250.nc", + "/data/OR_ABI-L1b-RadF-M3C11_G16_s20180722030423_e20180722041189_c20180722041254.nc", + "/data/OR_ABI-L1b-RadF-M3C12_G16_s20180722030423_e20180722041195_c20180722041256.nc", + "/data/OR_ABI-L1b-RadF-M3C13_G16_s20180722030423_e20180722041201_c20180722041259.nc", + "/data/OR_ABI-L1b-RadF-M3C14_G16_s20180722030423_e20180722041190_c20180722041258.nc", + "/data/OR_ABI-L1b-RadF-M3C15_G16_s20180722030423_e20180722041195_c20180722041259.nc", + "/data/OR_ABI-L1b-RadF-M3C16_G16_s20180722030423_e20180722041202_c20180722041259.nc"] def generate_fake_abi_xr_dataset(filename, chunks=None, **kwargs): @@ -50,31 +50,31 @@ def generate_fake_abi_xr_dataset(filename, chunks=None, **kwargs): This is an incomplete copy of existing file structures. 
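A minimal sketch of the stubbing pattern used by the regression tests further down: xarray.open_dataset is patched so reader calls get an in-memory Dataset instead of touching the filesystem. The trivial fake_dataset helper here is a stand-in for generate_fake_abi_xr_dataset, used only for illustration:

from unittest.mock import patch

import xarray as xr


def fake_dataset(filename, **kwargs):
    # Stand-in for generate_fake_abi_xr_dataset: return a tiny Dataset whose
    # content depends only on the requested filename.
    return xr.Dataset(attrs={"source_file": filename})


with patch("xarray.open_dataset") as fake_open_dataset:
    fake_open_dataset.side_effect = fake_dataset
    ds = xr.open_dataset(
        "/data/OR_ABI-L1b-RadF-M3C01_G16_s20180722030423_e20180722041189_c20180722041235-118900_0.nc")
    assert ds.attrs["source_file"].startswith("/data/OR_ABI")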
""" dataset = Dataset(attrs={ - 'time_coverage_start': '2018-03-13T20:30:42.3Z', - 'time_coverage_end': '2018-03-13T20:41:18.9Z', + "time_coverage_start": "2018-03-13T20:30:42.3Z", + "time_coverage_end": "2018-03-13T20:41:18.9Z", }) projection = DataArray( [-214748364], attrs={ - 'long_name': 'GOES-R ABI fixed grid projection', - 'grid_mapping_name': 'geostationary', - 'perspective_point_height': 35786023.0, - 'semi_major_axis': 6378137.0, - 'semi_minor_axis': 6356752.31414, - 'inverse_flattening': 298.2572221, - 'latitude_of_projection_origin': 0.0, - 'longitude_of_projection_origin': -75.0, - 'sweep_angle_axis': 'x' + "long_name": "GOES-R ABI fixed grid projection", + "grid_mapping_name": "geostationary", + "perspective_point_height": 35786023.0, + "semi_major_axis": 6378137.0, + "semi_minor_axis": 6356752.31414, + "inverse_flattening": 298.2572221, + "latitude_of_projection_origin": 0.0, + "longitude_of_projection_origin": -75.0, + "sweep_angle_axis": "x" }) - dataset['goes_imager_projection'] = projection + dataset["goes_imager_projection"] = projection - if 'C01' in filename or 'C03' in filename or 'C05' in filename: + if "C01" in filename or "C03" in filename or "C05" in filename: stop = 10847 step = 2 scale = 2.8e-05 offset = 0.151858 - elif 'C02' in filename: + elif "C02" in filename: stop = 21693 step = 4 scale = 1.4e-05 @@ -88,117 +88,117 @@ def generate_fake_abi_xr_dataset(filename, chunks=None, **kwargs): y = DataArray( da.arange(0, stop, step), attrs={ - 'scale_factor': -scale, - 'add_offset': offset, - 'units': 'rad', - 'axis': 'Y', - 'long_name': 'GOES fixed grid projection y-coordinate', - 'standard_name': 'projection_y_coordinate' + "scale_factor": -scale, + "add_offset": offset, + "units": "rad", + "axis": "Y", + "long_name": "GOES fixed grid projection y-coordinate", + "standard_name": "projection_y_coordinate" }, - dims=['y']) + dims=["y"]) - dataset['y'] = y + dataset["y"] = y x = DataArray( da.arange(0, stop, step), attrs={ - 'scale_factor': scale, - 'add_offset': -offset, - 'units': 'rad', - 'axis': 'X', - 'long_name': 'GOES fixed grid projection x-coordinate', - 'standard_name': 'projection_x_coordinate' + "scale_factor": scale, + "add_offset": -offset, + "units": "rad", + "axis": "X", + "long_name": "GOES fixed grid projection x-coordinate", + "standard_name": "projection_x_coordinate" }, - dims=['x']) + dims=["x"]) - dataset['x'] = x + dataset["x"] = x rad = DataArray( da.random.randint(0, 1025, size=[len(y), len(x)], dtype=np.int16, chunks=chunks), attrs={ - '_FillValue': np.array(1023), - 'long_name': 'ABI L1b Radiances', - 'standard_name': 'toa_outgoing_radiance_per_unit_wavelength', - '_Unsigned': 'true', - 'sensor_band_bit_depth': 10, - 'valid_range': np.array([0, 1022], dtype=np.int16), - 'scale_factor': 0.8121064, - 'add_offset': -25.936647, - 'units': 'W m-2 sr-1 um-1', - 'resolution': 'y: 0.000028 rad x: 0.000028 rad', - 'grid_mapping': 'goes_imager_projection', - 'cell_methods': 't: point area: point' + "_FillValue": np.array(1023), + "long_name": "ABI L1b Radiances", + "standard_name": "toa_outgoing_radiance_per_unit_wavelength", + "_Unsigned": "true", + "sensor_band_bit_depth": 10, + "valid_range": np.array([0, 1022], dtype=np.int16), + "scale_factor": 0.8121064, + "add_offset": -25.936647, + "units": "W m-2 sr-1 um-1", + "resolution": "y: 0.000028 rad x: 0.000028 rad", + "grid_mapping": "goes_imager_projection", + "cell_methods": "t: point area: point" }, - dims=['y', 'x'] + dims=["y", "x"] ) - dataset['Rad'] = rad + dataset["Rad"] = rad sublat = 
DataArray(0.0, attrs={ - 'long_name': 'nominal satellite subpoint latitude (platform latitude)', - 'standard_name': 'latitude', - '_FillValue': -999.0, - 'units': 'degrees_north'}) - dataset['nominal_satellite_subpoint_lat'] = sublat + "long_name": "nominal satellite subpoint latitude (platform latitude)", + "standard_name": "latitude", + "_FillValue": -999.0, + "units": "degrees_north"}) + dataset["nominal_satellite_subpoint_lat"] = sublat sublon = DataArray(-75.0, attrs={ - 'long_name': 'nominal satellite subpoint longitude (platform longitude)', - 'standard_name': 'longitude', - '_FillValue': -999.0, - 'units': 'degrees_east'}) + "long_name": "nominal satellite subpoint longitude (platform longitude)", + "standard_name": "longitude", + "_FillValue": -999.0, + "units": "degrees_east"}) - dataset['nominal_satellite_subpoint_lon'] = sublon + dataset["nominal_satellite_subpoint_lon"] = sublon satheight = DataArray(35786.023, attrs={ - 'long_name': 'nominal satellite height above GRS 80 ellipsoid (platform altitude)', - 'standard_name': 'height_above_reference_ellipsoid', - '_FillValue': -999.0, - 'units': 'km'}) + "long_name": "nominal satellite height above GRS 80 ellipsoid (platform altitude)", + "standard_name": "height_above_reference_ellipsoid", + "_FillValue": -999.0, + "units": "km"}) - dataset['nominal_satellite_height'] = satheight + dataset["nominal_satellite_height"] = satheight yaw_flip_flag = DataArray(0, attrs={ - 'long_name': 'Flag indicating the spacecraft is operating in yaw flip configuration', - '_Unsigned': 'true', - '_FillValue': np.array(-1), - 'valid_range': np.array([0, 1], dtype=np.int8), - 'units': '1', - 'flag_values': '0 1', - 'flag_meanings': 'false true'}) + "long_name": "Flag indicating the spacecraft is operating in yaw flip configuration", + "_Unsigned": "true", + "_FillValue": np.array(-1), + "valid_range": np.array([0, 1], dtype=np.int8), + "units": "1", + "flag_values": "0 1", + "flag_meanings": "false true"}) - dataset['yaw_flip_flag'] = yaw_flip_flag + dataset["yaw_flip_flag"] = yaw_flip_flag return dataset -@patch('xarray.open_dataset') +@patch("xarray.open_dataset") def test_1258(fake_open_dataset): """Save true_color from abi with radiance doesn't need two resamplings.""" from satpy import Scene fake_open_dataset.side_effect = generate_fake_abi_xr_dataset - scene = Scene(abi_file_list, reader='abi_l1b') - scene.load(['true_color_nocorr', 'C04'], calibration='radiance') - resampled_scene = scene.resample(scene.coarsest_area(), resampler='native') + scene = Scene(abi_file_list, reader="abi_l1b") + scene.load(["true_color_nocorr", "C04"], calibration="radiance") + resampled_scene = scene.resample(scene.coarsest_area(), resampler="native") assert len(resampled_scene.keys()) == 2 -@patch('xarray.open_dataset') +@patch("xarray.open_dataset") def test_1088(fake_open_dataset): """Check that copied arrays gets resampled.""" from satpy import Scene fake_open_dataset.side_effect = generate_fake_abi_xr_dataset - scene = Scene(abi_file_list, reader='abi_l1b') - scene.load(['C04'], calibration='radiance') + scene = Scene(abi_file_list, reader="abi_l1b") + scene.load(["C04"], calibration="radiance") - my_id = make_dataid(name='my_name', wavelength=(10, 11, 12)) - scene[my_id] = scene['C04'].copy() - resampled = scene.resample('eurol') + my_id = make_dataid(name="my_name", wavelength=(10, 11, 12)) + scene[my_id] = scene["C04"].copy() + resampled = scene.resample("eurol") assert resampled[my_id].shape == (2048, 2560) -@patch('xarray.open_dataset') 
+@patch("xarray.open_dataset") def test_no_enums(fake_open_dataset): """Check that no enums are inserted in the resulting attrs.""" from enum import Enum @@ -206,7 +206,7 @@ def test_no_enums(fake_open_dataset): from satpy import Scene fake_open_dataset.side_effect = generate_fake_abi_xr_dataset - scene = Scene(abi_file_list, reader='abi_l1b') - scene.load(['C04'], calibration='radiance') - for value in scene['C04'].attrs.values(): + scene = Scene(abi_file_list, reader="abi_l1b") + scene.load(["C04"], calibration="radiance") + for value in scene["C04"].attrs.values(): assert not isinstance(value, Enum) diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index ca9dd409cd..66e93009d2 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -32,7 +32,7 @@ def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=None, - input_dims=('y', 'x')): + input_dims=("y", "x")): """Get common data objects used in testing. Returns: @@ -52,49 +52,49 @@ def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=No from xarray import DataArray ds1 = DataArray(da.zeros(input_shape, chunks=85), dims=input_dims, - attrs={'name': 'test_data_name', 'test': 'test'}) - if input_dims and 'y' in input_dims: + attrs={"name": "test_data_name", "test": "test"}) + if input_dims and "y" in input_dims: ds1 = ds1.assign_coords(y=da.arange(input_shape[-2], chunks=85)) - if input_dims and 'x' in input_dims: + if input_dims and "x" in input_dims: ds1 = ds1.assign_coords(x=da.arange(input_shape[-1], chunks=85)) - if input_dims and 'bands' in input_dims: - ds1 = ds1.assign_coords(bands=list('RGBA'[:ds1.sizes['bands']])) + if input_dims and "bands" in input_dims: + ds1 = ds1.assign_coords(bands=list("RGBA"[:ds1.sizes["bands"]])) - input_proj_str = ('+proj=geos +lon_0=-95.0 +h=35786023.0 +a=6378137.0 ' - '+b=6356752.31414 +sweep=x +units=m +no_defs') + input_proj_str = ("+proj=geos +lon_0=-95.0 +h=35786023.0 +a=6378137.0 " + "+b=6356752.31414 +sweep=x +units=m +no_defs") source = AreaDefinition( - 'test_target', - 'test_target', - 'test_target', + "test_target", + "test_target", + "test_target", proj4_str_to_dict(input_proj_str), input_shape[1], # width input_shape[0], # height (-1000., -1500., 1000., 1500.)) - ds1.attrs['area'] = source + ds1.attrs["area"] = source crs = CRS.from_string(input_proj_str) ds1 = ds1.assign_coords(crs=crs) ds2 = ds1.copy() input_area_shape = tuple(ds1.sizes[dim] for dim in ds1.dims - if dim in ['y', 'x']) - geo_dims = ('y', 'x') if input_dims else None + if dim in ["y", "x"]) + geo_dims = ("y", "x") if input_dims else None lons = da.random.random(input_area_shape, chunks=50) lats = da.random.random(input_area_shape, chunks=50) swath_def = SwathDefinition( DataArray(lons, dims=geo_dims), DataArray(lats, dims=geo_dims)) - ds2.attrs['area'] = swath_def - crs = CRS.from_string('+proj=latlong +datum=WGS84 +ellps=WGS84') + ds2.attrs["area"] = swath_def + crs = CRS.from_string("+proj=latlong +datum=WGS84 +ellps=WGS84") ds2 = ds2.assign_coords(crs=crs) # set up target definition - output_proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') + output_proj_str = ("+proj=lcc +datum=WGS84 +ellps=WGS84 " + "+lon_0=-95. 
+lat_0=25 +lat_1=25 +units=m +no_defs") output_proj_str = output_proj or output_proj_str target = AreaDefinition( - 'test_target', - 'test_target', - 'test_target', + "test_target", + "test_target", + "test_target", proj4_str_to_dict(output_proj_str), output_shape[1], # width output_shape[0], # height @@ -111,31 +111,31 @@ def test_type_preserve(self): from pyresample.geometry import SwathDefinition from satpy.resample import resample_dataset - source_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=['y', 'x']), - xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=['y', 'x'])) - dest_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=['y', 'x']), - xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=['y', 'x'])) + source_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=["y", "x"]), + xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=["y", "x"])) + dest_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=["y", "x"]), + xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=["y", "x"])) expected_gap = np.array([[1, 2], [3, 255]]) - data = xr.DataArray(da.from_array(expected_gap, chunks=5), dims=['y', 'x']) - data.attrs['_FillValue'] = 255 - data.attrs['area'] = source_area + data = xr.DataArray(da.from_array(expected_gap, chunks=5), dims=["y", "x"]) + data.attrs["_FillValue"] = 255 + data.attrs["area"] = source_area res = resample_dataset(data, dest_area) - self.assertEqual(res.dtype, data.dtype) - self.assertTrue(np.all(res.values == expected_gap)) + assert res.dtype == data.dtype + assert np.all(res.values == expected_gap) expected_filled = np.array([[1, 2], [3, 3]]) res = resample_dataset(data, dest_area, radius_of_influence=1000000) - self.assertEqual(res.dtype, data.dtype) - self.assertTrue(np.all(res.values == expected_filled)) + assert res.dtype == data.dtype + assert np.all(res.values == expected_filled) class TestKDTreeResampler(unittest.TestCase): """Test the kd-tree resampler.""" - @mock.patch('satpy.resample.xr.Dataset') - @mock.patch('satpy.resample.zarr.open') - @mock.patch('satpy.resample.KDTreeResampler._create_cache_filename') - @mock.patch('pyresample.kd_tree.XArrayResamplerNN') + @mock.patch("satpy.resample.xr.Dataset") + @mock.patch("satpy.resample.zarr.open") + @mock.patch("satpy.resample.KDTreeResampler._create_cache_filename") + @mock.patch("pyresample.kd_tree.XArrayResamplerNN") def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, xr_dset): """Test the kd resampler.""" @@ -145,11 +145,11 @@ def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, xr_dset.return_value = mock_dset resampler = KDTreeResampler(source_swath, target_area) resampler.precompute( - mask=da.arange(5, chunks=5).astype(bool), cache_dir='.') + mask=da.arange(5, chunks=5).astype(bool), cache_dir=".") xr_resampler.assert_called_once() resampler.resampler.get_neighbour_info.assert_called() # swath definitions should not be cached - self.assertFalse(len(mock_dset.to_zarr.mock_calls), 0) + assert len(mock_dset.to_zarr.mock_calls) == 0 resampler.resampler.reset_mock() resampler = KDTreeResampler(source_area, target_area) @@ -159,15 +159,15 @@ def test_kd_resampling(self, xr_resampler, create_filename, zarr_open, try: the_dir = tempfile.mkdtemp() resampler = KDTreeResampler(source_area, target_area) - create_filename.return_value = os.path.join(the_dir, 'test_cache.zarr') + 
create_filename.return_value = os.path.join(the_dir, "test_cache.zarr") zarr_open.side_effect = ValueError() resampler.precompute(cache_dir=the_dir) # assert data was saved to the on-disk cache - self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1) + assert len(mock_dset.to_zarr.mock_calls) == 1 # assert that zarr_open was called to try to zarr_open something from disk - self.assertEqual(len(zarr_open.mock_calls), 1) + assert len(zarr_open.mock_calls) == 1 # we should have cached things in-memory - self.assertEqual(len(resampler._index_caches), 1) + assert len(resampler._index_caches) == 1 nbcalls = len(resampler.resampler.get_neighbour_info.mock_calls) # test reusing the resampler zarr_open.side_effect = None @@ -188,20 +188,20 @@ def astype(self, dtype): distance_array=4) resampler.precompute(cache_dir=the_dir) # we already have things cached in-memory, no need to save again - self.assertEqual(len(mock_dset.to_zarr.mock_calls), 1) + assert len(mock_dset.to_zarr.mock_calls) == 1 # we already have things cached in-memory, don't need to load - self.assertEqual(len(zarr_open.mock_calls), 1) + assert len(zarr_open.mock_calls) == 1 # we should have cached things in-memory - self.assertEqual(len(resampler._index_caches), 1) - self.assertEqual(len(resampler.resampler.get_neighbour_info.mock_calls), nbcalls) + assert len(resampler._index_caches) == 1 + assert len(resampler.resampler.get_neighbour_info.mock_calls) == nbcalls # test loading saved resampler resampler = KDTreeResampler(source_area, target_area) resampler.precompute(cache_dir=the_dir) - self.assertEqual(len(zarr_open.mock_calls), 4) - self.assertEqual(len(resampler.resampler.get_neighbour_info.mock_calls), nbcalls) + assert len(zarr_open.mock_calls) == 4 + assert len(resampler.resampler.get_neighbour_info.mock_calls) == nbcalls # we should have cached things in-memory now - self.assertEqual(len(resampler._index_caches), 1) + assert len(resampler._index_caches) == 1 finally: shutil.rmtree(the_dir) @@ -236,7 +236,7 @@ def test_expand_reduce_aggregate_identity(self): @pytest.mark.parametrize("dim0_factor", [1. 
/ 4, 0.333323423, 1.333323423]) def test_expand_reduce_aggregate_invalid(self, dim0_factor): """Test classmethod 'expand_reduce' fails when factor does not divide evenly.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="[Aggregation, Expand] .*"): NativeResampler._expand_reduce(self.d_arr, {0: dim0_factor, 1: 1.}) def test_expand_reduce_agg_rechunk(self): @@ -267,35 +267,35 @@ def test_expand_dims(self): assert new_data.shape == (200, 100) new_data2 = resampler.resample(ds1.compute()) np.testing.assert_equal(new_data.compute().data, new_data2.compute().data) - assert 'y' in new_data.coords - assert 'x' in new_data.coords - assert 'crs' in new_data.coords - assert isinstance(new_data.coords['crs'].item(), CRS) - assert 'lambert' in new_data.coords['crs'].item().coordinate_operation.method_name.lower() - assert new_data.coords['y'].attrs['units'] == 'meter' - assert new_data.coords['x'].attrs['units'] == 'meter' - assert target_area.crs == new_data.coords['crs'].item() + assert "y" in new_data.coords + assert "x" in new_data.coords + assert "crs" in new_data.coords + assert isinstance(new_data.coords["crs"].item(), CRS) + assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() + assert new_data.coords["y"].attrs["units"] == "meter" + assert new_data.coords["x"].attrs["units"] == "meter" + assert target_area.crs == new_data.coords["crs"].item() def test_expand_dims_3d(self): """Test expanding native resampling with 3D data.""" ds1, source_area, _, _, target_area = get_test_data( - input_shape=(3, 100, 50), input_dims=('bands', 'y', 'x')) + input_shape=(3, 100, 50), input_dims=("bands", "y", "x")) # source geo def doesn't actually matter resampler = NativeResampler(source_area, target_area) new_data = resampler.resample(ds1) assert new_data.shape == (3, 200, 100) new_data2 = resampler.resample(ds1.compute()) np.testing.assert_equal(new_data.compute().data, new_data2.compute().data) - assert 'y' in new_data.coords - assert 'x' in new_data.coords - assert 'bands' in new_data.coords - np.testing.assert_equal(new_data.coords['bands'].values, ['R', 'G', 'B']) - assert 'crs' in new_data.coords - assert isinstance(new_data.coords['crs'].item(), CRS) - assert 'lambert' in new_data.coords['crs'].item().coordinate_operation.method_name.lower() - assert new_data.coords['y'].attrs['units'] == 'meter' - assert new_data.coords['x'].attrs['units'] == 'meter' - assert target_area.crs == new_data.coords['crs'].item() + assert "y" in new_data.coords + assert "x" in new_data.coords + assert "bands" in new_data.coords + np.testing.assert_equal(new_data.coords["bands"].values, ["R", "G", "B"]) + assert "crs" in new_data.coords + assert isinstance(new_data.coords["crs"].item(), CRS) + assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() + assert new_data.coords["y"].attrs["units"] == "meter" + assert new_data.coords["x"].attrs["units"] == "meter" + assert target_area.crs == new_data.coords["crs"].item() def test_expand_without_dims(self): """Test expanding native resampling with no dimensions specified.""" @@ -306,10 +306,10 @@ def test_expand_without_dims(self): assert new_data.shape == (200, 100) new_data2 = resampler.resample(ds1.compute()) np.testing.assert_equal(new_data.compute().data, new_data2.compute().data) - assert 'crs' in new_data.coords - assert isinstance(new_data.coords['crs'].item(), CRS) - assert 'lambert' in new_data.coords['crs'].item().coordinate_operation.method_name.lower() - assert 
target_area.crs == new_data.coords['crs'].item() + assert "crs" in new_data.coords + assert isinstance(new_data.coords["crs"].item(), CRS) + assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() + assert target_area.crs == new_data.coords["crs"].item() def test_expand_without_dims_4D(self): """Test expanding native resampling with 4D data with no dimensions specified.""" @@ -317,16 +317,16 @@ def test_expand_without_dims_4D(self): input_shape=(2, 3, 100, 50), input_dims=None) # source geo def doesn't actually matter resampler = NativeResampler(source_area, target_area) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Can only handle 2D or 3D arrays without dimensions."): resampler.resample(ds1) class TestBilinearResampler(unittest.TestCase): """Test the bilinear resampler.""" - @mock.patch('satpy.resample._move_existing_caches') - @mock.patch('satpy.resample.BilinearResampler._create_cache_filename') - @mock.patch('pyresample.bilinear.XArrayBilinearResampler') + @mock.patch("satpy.resample._move_existing_caches") + @mock.patch("satpy.resample.BilinearResampler._create_cache_filename") + @mock.patch("pyresample.bilinear.XArrayBilinearResampler") def test_bil_resampling(self, xr_resampler, create_filename, move_existing_caches): """Test the bilinear resampler.""" @@ -344,29 +344,29 @@ def test_bil_resampling(self, xr_resampler, create_filename, # Test that get_sample_from_bil_info is called properly fill_value = 8 resampler.resampler.get_sample_from_bil_info.return_value = \ - xr.DataArray(da.zeros(target_area.shape), dims=('y', 'x')) + xr.DataArray(da.zeros(target_area.shape), dims=("y", "x")) new_data = resampler.compute(data, fill_value=fill_value) resampler.resampler.get_sample_from_bil_info.assert_called_with( data, fill_value=fill_value, output_shape=target_area.shape) - self.assertIn('y', new_data.coords) - self.assertIn('x', new_data.coords) - self.assertIn('crs', new_data.coords) - self.assertIsInstance(new_data.coords['crs'].item(), CRS) - self.assertIn('lambert', new_data.coords['crs'].item().coordinate_operation.method_name.lower()) - self.assertEqual(new_data.coords['y'].attrs['units'], 'meter') - self.assertEqual(new_data.coords['x'].attrs['units'], 'meter') - self.assertEqual(target_area.crs, new_data.coords['crs'].item()) + assert "y" in new_data.coords + assert "x" in new_data.coords + assert "crs" in new_data.coords + assert isinstance(new_data.coords["crs"].item(), CRS) + assert "lambert" in new_data.coords["crs"].item().coordinate_operation.method_name.lower() + assert new_data.coords["y"].attrs["units"] == "meter" + assert new_data.coords["x"].attrs["units"] == "meter" + assert target_area.crs == new_data.coords["crs"].item() # Test that the resampling info is tried to read from the disk resampler = BilinearResampler(source_swath, target_area) - resampler.precompute(cache_dir='.') + resampler.precompute(cache_dir=".") resampler.resampler.load_resampling_info.assert_called() # Test caching the resampling info try: the_dir = tempfile.mkdtemp() resampler = BilinearResampler(source_area, target_area) - create_filename.return_value = os.path.join(the_dir, 'test_cache.zarr') + create_filename.return_value = os.path.join(the_dir, "test_cache.zarr") xr_resampler.return_value.load_resampling_info.side_effect = IOError resampler.precompute(cache_dir=the_dir) @@ -381,20 +381,20 @@ def test_bil_resampling(self, xr_resampler, create_filename, # we already have things cached in-memory, no need to save again 
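A quick illustration of the pytest.raises(..., match=...) idiom this patch adds in place of bare pytest.raises calls: match is a regular expression checked with re.search against the string representation of the raised exception, so a substring of the expected message is enough. The parse_positive helper is purely hypothetical:

import pytest


def parse_positive(value):
    # Hypothetical helper used only for this illustration.
    number = int(value)
    if number <= 0:
        raise ValueError(f"expected a positive integer, got {number}")
    return number


# A partial message works because match= is applied with re.search.
with pytest.raises(ValueError, match="positive integer"):
    parse_positive("-3")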
resampler.resampler.save_resampling_info.assert_called_once() # we already have things cached in-memory, don't need to load - self.assertEqual(resampler.resampler.get_bil_info.call_count, nbcalls) + assert resampler.resampler.get_bil_info.call_count == nbcalls # test loading saved resampler resampler = BilinearResampler(source_area, target_area) resampler.precompute(cache_dir=the_dir) - self.assertEqual(resampler.resampler.load_resampling_info.call_count, 3) - self.assertEqual(resampler.resampler.get_bil_info.call_count, nbcalls) + assert resampler.resampler.load_resampling_info.call_count == 3 + assert resampler.resampler.get_bil_info.call_count == nbcalls resampler = BilinearResampler(source_area, target_area) resampler.precompute(cache_dir=the_dir) resampler.save_bil_info(cache_dir=the_dir) - zarr_file = os.path.join(the_dir, 'test_cache.zarr') + zarr_file = os.path.join(the_dir, "test_cache.zarr") # Save again faking the cache file already exists - with mock.patch('os.path.exists') as exists: + with mock.patch("os.path.exists") as exists: exists.return_value = True resampler.save_bil_info(cache_dir=the_dir) move_existing_caches.assert_called_once_with(the_dir, zarr_file) @@ -407,18 +407,16 @@ def test_move_existing_caches(self): try: the_dir = tempfile.mkdtemp() # Test that existing cache file is moved away - zarr_file = os.path.join(the_dir, 'test.zarr') - with open(zarr_file, 'w') as fid: - fid.write('42') + zarr_file = os.path.join(the_dir, "test.zarr") + with open(zarr_file, "w") as fid: + fid.write("42") from satpy.resample import _move_existing_caches _move_existing_caches(the_dir, zarr_file) - self.assertFalse(os.path.exists(zarr_file)) - self.assertTrue(os.path.exists( - os.path.join(the_dir, 'moved_by_satpy', - 'test.zarr'))) + assert not os.path.exists(zarr_file) + assert os.path.exists(os.path.join(the_dir, "moved_by_satpy", "test.zarr")) # Run again to see that the existing dir doesn't matter - with open(zarr_file, 'w') as fid: - fid.write('42') + with open(zarr_file, "w") as fid: + fid.write("42") _move_existing_caches(the_dir, zarr_file) finally: shutil.rmtree(the_dir) @@ -433,69 +431,65 @@ def test_area_def_coordinates(self): from satpy.resample import add_crs_xy_coords area_def = AreaDefinition( - 'test', 'test', 'test', {'proj': 'lcc', 'lat_1': 25, 'lat_0': 25}, + "test", "test", "test", {"proj": "lcc", "lat_1": 25, "lat_0": 25}, 100, 200, [-100, -100, 100, 100] ) data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), - attrs={'area': area_def}, - dims=('y', 'x'), + attrs={"area": area_def}, + dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) - self.assertIn('y', new_data_arr.coords) - self.assertIn('x', new_data_arr.coords) - - self.assertIn('units', new_data_arr.coords['y'].attrs) - self.assertEqual( - new_data_arr.coords['y'].attrs['units'], 'meter') - self.assertIn('units', new_data_arr.coords['x'].attrs) - self.assertEqual( - new_data_arr.coords['x'].attrs['units'], 'meter') - self.assertIn('crs', new_data_arr.coords) - self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) - self.assertEqual(area_def.crs, new_data_arr.coords['crs'].item()) + assert "y" in new_data_arr.coords + assert "x" in new_data_arr.coords + + assert "units" in new_data_arr.coords["y"].attrs + assert new_data_arr.coords["y"].attrs["units"] == "meter" + assert "units" in new_data_arr.coords["x"].attrs + assert new_data_arr.coords["x"].attrs["units"] == "meter" + assert "crs" in new_data_arr.coords + assert isinstance(new_data_arr.coords["crs"].item(), CRS) + 
assert area_def.crs == new_data_arr.coords["crs"].item() # already has coords data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), - attrs={'area': area_def}, - dims=('y', 'x'), - coords={'y': np.arange(2, 202), 'x': np.arange(100)} + attrs={"area": area_def}, + dims=("y", "x"), + coords={"y": np.arange(2, 202), "x": np.arange(100)} ) new_data_arr = add_crs_xy_coords(data_arr, area_def) - self.assertIn('y', new_data_arr.coords) - self.assertNotIn('units', new_data_arr.coords['y'].attrs) - self.assertIn('x', new_data_arr.coords) - self.assertNotIn('units', new_data_arr.coords['x'].attrs) - np.testing.assert_equal(new_data_arr.coords['y'], np.arange(2, 202)) + assert "y" in new_data_arr.coords + assert "units" not in new_data_arr.coords["y"].attrs + assert "x" in new_data_arr.coords + assert "units" not in new_data_arr.coords["x"].attrs + np.testing.assert_equal(new_data_arr.coords["y"], np.arange(2, 202)) - self.assertIn('crs', new_data_arr.coords) - self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) - self.assertEqual(area_def.crs, new_data_arr.coords['crs'].item()) + assert "crs" in new_data_arr.coords + assert isinstance(new_data_arr.coords["crs"].item(), CRS) + assert area_def.crs == new_data_arr.coords["crs"].item() # lat/lon area area_def = AreaDefinition( - 'test', 'test', 'test', {'proj': 'latlong'}, + "test", "test", "test", {"proj": "latlong"}, 100, 200, [-100, -100, 100, 100] ) data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), - attrs={'area': area_def}, - dims=('y', 'x'), + attrs={"area": area_def}, + dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) - self.assertIn('y', new_data_arr.coords) - self.assertIn('x', new_data_arr.coords) - - self.assertIn('units', new_data_arr.coords['y'].attrs) - self.assertEqual( - new_data_arr.coords['y'].attrs['units'], 'degrees_north') - self.assertIn('units', new_data_arr.coords['x'].attrs) - self.assertEqual( - new_data_arr.coords['x'].attrs['units'], 'degrees_east') - self.assertIn('crs', new_data_arr.coords) - self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) - self.assertEqual(area_def.crs, new_data_arr.coords['crs'].item()) + assert "y" in new_data_arr.coords + assert "x" in new_data_arr.coords + + assert "units" in new_data_arr.coords["y"].attrs + assert new_data_arr.coords["y"].attrs["units"] == "degrees_north" + assert "units" in new_data_arr.coords["x"].attrs + assert new_data_arr.coords["x"].attrs["units"] == "degrees_east" + assert "crs" in new_data_arr.coords + assert isinstance(new_data_arr.coords["crs"].item(), CRS) + assert area_def.crs == new_data_arr.coords["crs"].item() def test_swath_def_coordinates(self): """Test coordinates being added with an SwathDefinition.""" @@ -504,15 +498,15 @@ def test_swath_def_coordinates(self): from satpy.resample import add_crs_xy_coords lons_data = da.random.random((200, 100), chunks=50) lats_data = da.random.random((200, 100), chunks=50) - lons = xr.DataArray(lons_data, attrs={'units': 'degrees_east'}, - dims=('y', 'x')) - lats = xr.DataArray(lats_data, attrs={'units': 'degrees_north'}, - dims=('y', 'x')) + lons = xr.DataArray(lons_data, attrs={"units": "degrees_east"}, + dims=("y", "x")) + lats = xr.DataArray(lats_data, attrs={"units": "degrees_north"}, + dims=("y", "x")) area_def = SwathDefinition(lons, lats) data_arr = xr.DataArray( da.zeros((200, 100), chunks=50), - attrs={'area': area_def}, - dims=('y', 'x'), + attrs={"area": area_def}, + dims=("y", "x"), ) new_data_arr = add_crs_xy_coords(data_arr, area_def) # See 
https://github.com/pydata/xarray/issues/3068 @@ -527,11 +521,11 @@ def test_swath_def_coordinates(self): # new_data_arr.coords['latitude'].attrs['units'], 'degrees_north') # self.assertIsInstance(new_data_arr.coords['latitude'].data, da.Array) - self.assertIn('crs', new_data_arr.coords) - crs = new_data_arr.coords['crs'].item() - self.assertIsInstance(crs, CRS) + assert "crs" in new_data_arr.coords + crs = new_data_arr.coords["crs"].item() + assert isinstance(crs, CRS) assert crs.is_geographic - self.assertIsInstance(new_data_arr.coords['crs'].item(), CRS) + assert isinstance(new_data_arr.coords["crs"].item(), CRS) class TestBucketAvg(unittest.TestCase): @@ -550,16 +544,16 @@ def setUp(self): def test_init(self): """Test bucket resampler initialization.""" - self.assertIsNone(self.bucket.resampler) - self.assertTrue(self.bucket.source_geo_def == self.source_geo_def) - self.assertTrue(self.bucket.target_geo_def == self.target_geo_def) + assert self.bucket.resampler is None + assert self.bucket.source_geo_def == self.source_geo_def + assert self.bucket.target_geo_def == self.target_geo_def - @mock.patch('pyresample.bucket.BucketResampler') + @mock.patch("pyresample.bucket.BucketResampler") def test_precompute(self, bucket): """Test bucket resampler precomputation.""" bucket.return_value = True self.bucket.precompute() - self.assertTrue(self.bucket.resampler) + assert self.bucket.resampler bucket.assert_called_once_with(self.target_geo_def, 1, 2) def _compute_mocked_bucket_avg(self, data, return_data=None, **kwargs): @@ -577,18 +571,18 @@ def test_compute(self): # 1D data data = da.ones((5,)) res = self._compute_mocked_bucket_avg(data, fill_value=2) - self.assertEqual(res.shape, (1, 5)) + assert res.shape == (1, 5) # 2D data data = da.ones((5, 5)) res = self._compute_mocked_bucket_avg(data, fill_value=2) - self.assertEqual(res.shape, (1, 5, 5)) + assert res.shape == (1, 5, 5) # 3D data data = da.ones((3, 5, 5)) self.bucket.resampler.get_average.return_value = data[0, :, :] res = self._compute_mocked_bucket_avg(data, return_data=data[0, :, :], fill_value=2) - self.assertEqual(res.shape, (3, 5, 5)) + assert res.shape == (3, 5, 5) - @mock.patch('satpy.resample.PR_USE_SKIPNA', True) + @mock.patch("satpy.resample.PR_USE_SKIPNA", True) def test_compute_and_use_skipna_handling(self): """Test bucket resampler computation and use skipna handling.""" data = da.ones((5,)) @@ -611,7 +605,7 @@ def test_compute_and_use_skipna_handling(self): fill_value=2, skipna=True) - @mock.patch('satpy.resample.PR_USE_SKIPNA', False) + @mock.patch("satpy.resample.PR_USE_SKIPNA", False) def test_compute_and_not_use_skipna_handling(self): """Test bucket resampler computation and not use skipna handling.""" data = da.ones((5,)) @@ -640,7 +634,7 @@ def test_compute_and_not_use_skipna_handling(self): fill_value=2, mask_all_nan=False) - @mock.patch('pyresample.bucket.BucketResampler') + @mock.patch("pyresample.bucket.BucketResampler") def test_resample(self, pyresample_bucket): """Test bucket resamplers resample method.""" self.bucket.resampler = mock.MagicMock() @@ -648,38 +642,38 @@ def test_resample(self, pyresample_bucket): self.bucket.compute = mock.MagicMock() # 1D input data - data = xr.DataArray(da.ones((5,)), dims=('foo'), attrs={'bar': 'baz'}) + data = xr.DataArray(da.ones((5,)), dims=("foo"), attrs={"bar": "baz"}) self.bucket.compute.return_value = da.ones((5, 5)) res = self.bucket.resample(data) self.bucket.precompute.assert_called_once() self.bucket.compute.assert_called_once() - self.assertEqual(res.shape, (5, 
5)) - self.assertEqual(res.dims, ('y', 'x')) - self.assertTrue('bar' in res.attrs) - self.assertEqual(res.attrs['bar'], 'baz') + assert res.shape == (5, 5) + assert res.dims == ("y", "x") + assert "bar" in res.attrs + assert res.attrs["bar"] == "baz" # 2D input data - data = xr.DataArray(da.ones((5, 5)), dims=('foo', 'bar')) + data = xr.DataArray(da.ones((5, 5)), dims=("foo", "bar")) self.bucket.compute.return_value = da.ones((5, 5)) res = self.bucket.resample(data) - self.assertEqual(res.shape, (5, 5)) - self.assertEqual(res.dims, ('y', 'x')) + assert res.shape == (5, 5) + assert res.dims == ("y", "x") # 3D input data with 'bands' dim - data = xr.DataArray(da.ones((1, 5, 5)), dims=('bands', 'foo', 'bar'), - coords={'bands': ['L']}) + data = xr.DataArray(da.ones((1, 5, 5)), dims=("bands", "foo", "bar"), + coords={"bands": ["L"]}) self.bucket.compute.return_value = da.ones((1, 5, 5)) res = self.bucket.resample(data) - self.assertEqual(res.shape, (1, 5, 5)) - self.assertEqual(res.dims, ('bands', 'y', 'x')) - self.assertEqual(res.coords['bands'], ['L']) + assert res.shape == (1, 5, 5) + assert res.dims == ("bands", "y", "x") + assert res.coords["bands"] == ["L"] # 3D input data with misc dim names - data = xr.DataArray(da.ones((3, 5, 5)), dims=('foo', 'bar', 'baz')) + data = xr.DataArray(da.ones((3, 5, 5)), dims=("foo", "bar", "baz")) self.bucket.compute.return_value = da.ones((3, 5, 5)) res = self.bucket.resample(data) - self.assertEqual(res.shape, (3, 5, 5)) - self.assertEqual(res.dims, ('foo', 'bar', 'baz')) + assert res.shape == (3, 5, 5) + assert res.dims == ("foo", "bar", "baz") class TestBucketSum(unittest.TestCase): @@ -709,17 +703,17 @@ def test_compute(self): # 1D data data = da.ones((5,)) res = self._compute_mocked_bucket_sum(data) - self.assertEqual(res.shape, (1, 5)) + assert res.shape == (1, 5) # 2D data data = da.ones((5, 5)) res = self._compute_mocked_bucket_sum(data) - self.assertEqual(res.shape, (1, 5, 5)) + assert res.shape == (1, 5, 5) # 3D data data = da.ones((3, 5, 5)) res = self._compute_mocked_bucket_sum(data, return_data=data[0, :, :]) - self.assertEqual(res.shape, (3, 5, 5)) + assert res.shape == (3, 5, 5) - @mock.patch('satpy.resample.PR_USE_SKIPNA', True) + @mock.patch("satpy.resample.PR_USE_SKIPNA", True) def test_compute_and_use_skipna_handling(self): """Test bucket resampler computation and use skipna handling.""" data = da.ones((5,)) @@ -739,7 +733,7 @@ def test_compute_and_use_skipna_handling(self): data, skipna=True) - @mock.patch('satpy.resample.PR_USE_SKIPNA', False) + @mock.patch("satpy.resample.PR_USE_SKIPNA", False) def test_compute_and_not_use_skipna_handling(self): """Test bucket resampler computation and not use skipna handling.""" data = da.ones((5,)) @@ -794,16 +788,16 @@ def test_compute(self): data = da.ones((5,)) res = self._compute_mocked_bucket_count(data) self.bucket.resampler.get_count.assert_called_once_with() - self.assertEqual(res.shape, (1, 5)) + assert res.shape == (1, 5) # 2D data data = da.ones((5, 5)) res = self._compute_mocked_bucket_count(data) self.bucket.resampler.get_count.assert_called_once_with() - self.assertEqual(res.shape, (1, 5, 5)) + assert res.shape == (1, 5, 5) # 3D data data = da.ones((3, 5, 5)) res = self._compute_mocked_bucket_count(data, return_data=data[0, :, :]) - self.assertEqual(res.shape, (3, 5, 5)) + assert res.shape == (3, 5, 5) class TestBucketFraction(unittest.TestCase): @@ -843,7 +837,7 @@ def test_compute(self): with self.assertRaises(ValueError): _ = self.bucket.compute(data) - 
@mock.patch('pyresample.bucket.BucketResampler') + @mock.patch("pyresample.bucket.BucketResampler") def test_resample(self, pyresample_bucket): """Test fraction bucket resamplers resample method.""" self.bucket.resampler = mock.MagicMock() @@ -851,10 +845,10 @@ def test_resample(self, pyresample_bucket): self.bucket.compute = mock.MagicMock() # Fractions return a dict - data = xr.DataArray(da.ones((1, 5, 5)), dims=('bands', 'y', 'x')) + data = xr.DataArray(da.ones((1, 5, 5)), dims=("bands", "y", "x")) arr = da.ones((5, 5)) self.bucket.compute.return_value = {0: arr, 1: arr, 2: arr} res = self.bucket.resample(data) - self.assertTrue('categories' in res.coords) - self.assertTrue('categories' in res.dims) - self.assertTrue(np.all(res.coords['categories'] == np.array([0, 1, 2]))) + assert "categories" in res.coords + assert "categories" in res.dims + assert np.all(res.coords["categories"] == np.array([0, 1, 2])) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index ef6a359cdd..6f5db02087 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -162,21 +162,21 @@ class TestGetSatPos: def test_get_satpos(self, included_prefixes, preference, expected_result): """Test getting the satellite position.""" all_orb_params = { - 'nadir_longitude': 1, - 'satellite_actual_longitude': 1.1, - 'satellite_nominal_longitude': 1.2, - 'projection_longitude': 1.3, - 'nadir_latitude': 2, - 'satellite_actual_latitude': 2.1, - 'satellite_nominal_latitude': 2.2, - 'projection_latitude': 2.3, - 'satellite_actual_altitude': 3, - 'satellite_nominal_altitude': 3.1, - 'projection_altitude': 3.2 + "nadir_longitude": 1, + "satellite_actual_longitude": 1.1, + "satellite_nominal_longitude": 1.2, + "projection_longitude": 1.3, + "nadir_latitude": 2, + "satellite_actual_latitude": 2.1, + "satellite_nominal_latitude": 2.2, + "projection_latitude": 2.3, + "satellite_actual_altitude": 3, + "satellite_nominal_altitude": 3.1, + "projection_altitude": 3.2 } orb_params = {key: value for key, value in all_orb_params.items() if any(in_prefix in key for in_prefix in included_prefixes)} - data_arr = xr.DataArray((), attrs={'orbital_parameters': orb_params}) + data_arr = xr.DataArray((), attrs={"orbital_parameters": orb_params}) with warnings.catch_warnings(record=True) as caught_warnings: lon, lat, alt = get_satpos(data_arr, preference=preference) @@ -190,11 +190,11 @@ def test_get_satpos(self, included_prefixes, preference, expected_result): @pytest.mark.parametrize( "attrs", - ( + [ {}, - {'orbital_parameters': {'projection_longitude': 1}}, - {'satellite_altitude': 1} - ) + {"orbital_parameters": {"projection_longitude": 1}}, + {"satellite_altitude": 1} + ] ) def test_get_satpos_fails_with_informative_error(self, attrs): """Test that get_satpos raises an informative error message.""" @@ -242,7 +242,7 @@ def test_make_fake_scene(): "six": np.arange(25).reshape(5, 5) }) assert len(sc.keys()) == 1 - assert sc.keys().pop()['name'] == "six" + assert sc.keys().pop()["name"] == "six" assert sc["six"].attrs["area"].shape == (5, 5) sc = make_fake_scene({ "seven": np.arange(3 * 7).reshape(3, 7), @@ -276,15 +276,14 @@ def test_basic_check_satpy(self): def test_specific_check_satpy(self): """Test 'check_satpy' with specific features provided.""" from satpy.utils import check_satpy - with mock.patch('satpy.utils.print') as print_mock: - check_satpy(readers=['viirs_sdr'], extras=('cartopy', '__fake')) + with mock.patch("satpy.utils.print") as print_mock: + check_satpy(readers=["viirs_sdr"], extras=("cartopy", 
"__fake")) checked_fake = False for call in print_mock.mock_calls: - if len(call[1]) > 0 and '__fake' in call[1][0]: - self.assertNotIn('ok', call[1][1]) + if len(call[1]) > 0 and "__fake" in call[1][0]: + assert "ok" not in call[1][1] checked_fake = True - self.assertTrue(checked_fake, "Did not find __fake module " - "mentioned in checks") + assert checked_fake, "Did not find __fake module mentioned in checks" def test_debug_on(caplog): @@ -557,7 +556,7 @@ def test_convert_remote_files_to_fsspec_windows_paths(): assert res == filenames -@mock.patch('fsspec.open_files') +@mock.patch("fsspec.open_files") def test_convert_remote_files_to_fsspec_storage_options(open_files): """Test convertion of remote files to fsspec objects. @@ -566,7 +565,7 @@ def test_convert_remote_files_to_fsspec_storage_options(open_files): from satpy.utils import convert_remote_files_to_fsspec filenames = ["s3://tmp/file1.nc"] - storage_options = {'anon': True} + storage_options = {"anon": True} _ = convert_remote_files_to_fsspec(filenames, storage_options=storage_options) @@ -576,7 +575,7 @@ def test_convert_remote_files_to_fsspec_storage_options(open_files): def test_import_error_helper(): """Test the import error helper.""" module = "some_crazy_name_for_unknow_dependency_module" - with pytest.raises(ImportError) as err: + with pytest.raises(ImportError) as err: # noqa: PT012 with import_error_helper(module): import unknow_dependency_module # noqa assert module in str(err) diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py index 986687b0d6..c2d049dae1 100644 --- a/satpy/tests/test_writers.py +++ b/satpy/tests/test_writers.py @@ -39,10 +39,10 @@ def test_to_image_1d(self): """Conversion to image.""" # 1D from satpy.writers import to_image - p = xr.DataArray(np.arange(25), dims=['y']) + p = xr.DataArray(np.arange(25), dims=["y"]) self.assertRaises(ValueError, to_image, p) - @mock.patch('satpy.writers.XRImage') + @mock.patch("satpy.writers.XRImage") def test_to_image_2d(self, mock_geoimage): """Conversion to image.""" from satpy.writers import to_image @@ -51,35 +51,35 @@ def test_to_image_2d(self, mock_geoimage): data = np.arange(25).reshape((5, 5)) p = xr.DataArray(data, attrs=dict(mode="L", fill_value=0, palette=[0, 1, 2, 3, 4, 5]), - dims=['y', 'x']) + dims=["y", "x"]) to_image(p) np.testing.assert_array_equal( data, mock_geoimage.call_args[0][0].values) mock_geoimage.reset_mock() - @mock.patch('satpy.writers.XRImage') + @mock.patch("satpy.writers.XRImage") def test_to_image_3d(self, mock_geoimage): """Conversion to image.""" # 3D from satpy.writers import to_image data = np.arange(75).reshape((3, 5, 5)) - p = xr.DataArray(data, dims=['bands', 'y', 'x']) - p['bands'] = ['R', 'G', 'B'] + p = xr.DataArray(data, dims=["bands", "y", "x"]) + p["bands"] = ["R", "G", "B"] to_image(p) np.testing.assert_array_equal(data[0], mock_geoimage.call_args[0][0][0]) np.testing.assert_array_equal(data[1], mock_geoimage.call_args[0][0][1]) np.testing.assert_array_equal(data[2], mock_geoimage.call_args[0][0][2]) - @mock.patch('satpy.writers.get_enhanced_image') + @mock.patch("satpy.writers.get_enhanced_image") def test_show(self, mock_get_image): """Check showing.""" from satpy.writers import show data = np.arange(25).reshape((5, 5)) - p = xr.DataArray(data, dims=['y', 'x']) + p = xr.DataArray(data, dims=["y", "x"]) show(p) - self.assertTrue(mock_get_image.return_value.show.called) + assert mock_get_image.return_value.show.called class TestEnhancer(unittest.TestCase): @@ -89,13 +89,13 @@ def 
test_basic_init_no_args(self): """Test Enhancer init with no arguments passed.""" from satpy.writers import Enhancer e = Enhancer() - self.assertIsNotNone(e.enhancement_tree) + assert e.enhancement_tree is not None def test_basic_init_no_enh(self): """Test Enhancer init requesting no enhancements.""" from satpy.writers import Enhancer e = Enhancer(enhancement_config_file=False) - self.assertIsNone(e.enhancement_tree) + assert e.enhancement_tree is None def test_basic_init_provided_enh(self): """Test Enhancer init with string enhancement configs.""" @@ -108,7 +108,7 @@ def test_basic_init_provided_enh(self): method: !!python/name:satpy.enhancements.stretch kwargs: {stretch: linear} """]) - self.assertIsNotNone(e.enhancement_tree) + assert e.enhancement_tree is not None def test_init_nonexistent_enh_file(self): """Test Enhancer init with a nonexistent enhancement configuration file.""" @@ -128,7 +128,7 @@ def setup_class(cls): base_dir = os.path.dirname(fn) if base_dir: os.makedirs(base_dir, exist_ok=True) - with open(fn, 'w') as f: + with open(fn, "w") as f: f.write(content) # create fake test image writer @@ -136,7 +136,7 @@ def setup_class(cls): class CustomImageWriter(ImageWriter): def __init__(self, **kwargs): - super(CustomImageWriter, self).__init__(name='test', config_files=[], **kwargs) + super(CustomImageWriter, self).__init__(name="test", config_files=[], **kwargs) self.img = None def save_image(self, img, **kwargs): @@ -148,7 +148,7 @@ def teardown_class(cls): """Remove fake user configurations.""" for fn, _content in cls.TEST_CONFIGS.items(): base_dir = os.path.dirname(fn) - if base_dir not in ['.', ''] and os.path.isdir(base_dir): + if base_dir not in [".", ""] and os.path.isdir(base_dir): shutil.rmtree(base_dir) elif os.path.isfile(fn): os.remove(fn) @@ -157,8 +157,8 @@ def teardown_class(cls): class TestComplexSensorEnhancerConfigs(_BaseCustomEnhancementConfigTests): """Test enhancement configs that use or expect multiple sensors.""" - ENH_FN = 'test_sensor1.yaml' - ENH_FN2 = 'test_sensor2.yaml' + ENH_FN = "test_sensor1.yaml" + ENH_FN2 = "test_sensor2.yaml" TEST_CONFIGS = { ENH_FN: """ @@ -203,11 +203,11 @@ def test_multisensor_choice(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs={ - 'name': 'test1', - 'sensor': {'test_sensor2', 'test_sensor1'}, - 'mode': 'L' + "name": "test1", + "sensor": {"test_sensor2", "test_sensor1"}, + "mode": "L" }, - dims=['y', 'x']) + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) @@ -226,11 +226,11 @@ def test_multisensor_exact(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs={ - 'name': 'my_comp', - 'sensor': {'test_sensor2', 'test_sensor1'}, - 'mode': 'L' + "name": "my_comp", + "sensor": {"test_sensor2", "test_sensor1"}, + "mode": "L" }, - dims=['y', 'x']) + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) @@ -248,8 +248,8 @@ def test_enhance_bad_query_value(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name=["I", "am", "invalid"], sensor='test_sensor2', mode='L'), - dims=['y', 'x']) + attrs=dict(name=["I", "am", "invalid"], sensor="test_sensor2", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None with pytest.raises(KeyError, match="No .* found for None"): @@ -259,11 +259,11 @@ 
def test_enhance_bad_query_value(self): class TestEnhancerUserConfigs(_BaseCustomEnhancementConfigTests): """Test `Enhancer` functionality when user's custom configurations are present.""" - ENH_FN = 'test_sensor.yaml' - ENH_ENH_FN = os.path.join('enhancements', ENH_FN) - ENH_FN2 = 'test_sensor2.yaml' - ENH_ENH_FN2 = os.path.join('enhancements', ENH_FN2) - ENH_FN3 = 'test_empty.yaml' + ENH_FN = "test_sensor.yaml" + ENH_ENH_FN = os.path.join("enhancements", ENH_FN) + ENH_FN2 = "test_sensor2.yaml" + ENH_ENH_FN2 = os.path.join("enhancements", ENH_FN2) + ENH_FN3 = "test_empty.yaml" TEST_CONFIGS = { ENH_FN: """ @@ -303,8 +303,8 @@ def test_enhance_empty_config(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(sensor='test_empty', mode='L'), - dims=['y', 'x']) + attrs=dict(sensor="test_empty", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None get_enhanced_image(ds, enhance=e) @@ -317,8 +317,8 @@ def test_enhance_with_sensor_no_entry(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(sensor='test_sensor2', mode='L'), - dims=['y', 'x']) + attrs=dict(sensor="test_sensor2", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None get_enhanced_image(ds, enhance=e) @@ -332,8 +332,8 @@ def test_no_enhance(self): from satpy.writers import get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name='test1', sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", sensor="test_sensor", mode="L"), + dims=["y", "x"]) img = get_enhanced_image(ds, enhance=False) np.testing.assert_allclose(img.data.data.compute().squeeze(), ds.data) @@ -341,8 +341,8 @@ def test_writer_no_enhance(self): """Test turning off enhancements with writer.""" from xarray import DataArray ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name='test1', sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", sensor="test_sensor", mode="L"), + dims=["y", "x"]) writer = self.CustomImageWriter(enhance=False) writer.save_datasets((ds,), compute=False) img = writer.img @@ -354,8 +354,8 @@ def test_writer_custom_enhance(self): from satpy.writers import Enhancer ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name='test1', sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", sensor="test_sensor", mode="L"), + dims=["y", "x"]) enhance = Enhancer() writer = self.CustomImageWriter(enhance=enhance) writer.save_datasets((ds,), compute=False) @@ -368,8 +368,8 @@ def test_enhance_with_sensor_entry(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name='test1', sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", sensor="test_sensor", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) @@ -380,8 +380,8 @@ def test_enhance_with_sensor_entry(self): 1.) 
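For reference, a minimal usage sketch of the writer helper these enhancement tests exercise, mirroring test_no_enhance above: get_enhanced_image converts a 2D DataArray to a trollimage XRImage, here with enhancements disabled so the pixel values pass through unchanged (assumes satpy and trollimage are installed):

import numpy as np
from xarray import DataArray

from satpy.writers import get_enhanced_image

ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
               attrs=dict(name="test1", sensor="test_sensor", mode="L"),
               dims=["y", "x"])
img = get_enhanced_image(ds, enhance=False)
# With enhance=False the image data matches the input values exactly.
np.testing.assert_allclose(img.data.data.compute().squeeze(), ds.data)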
ds = DataArray(da.arange(1, 11., chunks=5).reshape((2, 5)), - attrs=dict(name='test1', sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", sensor="test_sensor", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) @@ -396,9 +396,9 @@ def test_enhance_with_sensor_entry2(self): from satpy.writers import Enhancer, get_enhanced_image ds = DataArray(np.arange(1, 11.).reshape((2, 5)), - attrs=dict(name='test1', units='kelvin', - sensor='test_sensor', mode='L'), - dims=['y', 'x']) + attrs=dict(name="test1", units="kelvin", + sensor="test_sensor", mode="L"), + dims=["y", "x"]) e = Enhancer() assert e.enhancement_tree is not None img = get_enhanced_image(ds, enhance=e) @@ -411,7 +411,7 @@ def test_enhance_with_sensor_entry2(self): class TestReaderEnhancerConfigs(_BaseCustomEnhancementConfigTests): """Test enhancement configs that use reader name.""" - ENH_FN = 'test_sensor1.yaml' + ENH_FN = "test_sensor1.yaml" # NOTE: The sections are ordered in a special way so that if 'reader' key # isn't provided that we'll get the section we didn't want and all tests @@ -452,11 +452,11 @@ def _get_test_data_array(self): from xarray import DataArray ds = DataArray(np.arange(1, 11.).reshape((2, 5)), attrs={ - 'name': 'test1', - 'sensor': 'test_sensor1', - 'mode': 'L', + "name": "test1", + "sensor": "test_sensor1", + "mode": "L", }, - dims=['y', 'x']) + dims=["y", "x"]) return ds def _get_enhanced_image(self, data_arr): @@ -512,33 +512,31 @@ def test_filename_matches_writer_name(self): class IgnoreLoader(yaml.SafeLoader): def _ignore_all_tags(self, tag_suffix, node): - return tag_suffix + ' ' + node.value - IgnoreLoader.add_multi_constructor('', IgnoreLoader._ignore_all_tags) + return tag_suffix + " " + node.value + IgnoreLoader.add_multi_constructor("", IgnoreLoader._ignore_all_tags) from satpy._config import glob_config from satpy.writers import read_writer_config - for writer_config in glob_config('writers/*.yaml'): + for writer_config in glob_config("writers/*.yaml"): writer_fn = os.path.basename(writer_config) writer_fn_name = os.path.splitext(writer_fn)[0] writer_info = read_writer_config([writer_config], loader=IgnoreLoader) - self.assertEqual(writer_fn_name, writer_info['name'], - "Writer YAML filename doesn't match writer " - "name in the YAML file.") + assert writer_fn_name == writer_info["name"] def test_available_writers(self): """Test the 'available_writers' function.""" from satpy import available_writers writer_names = available_writers() - self.assertGreater(len(writer_names), 0) - self.assertIsInstance(writer_names[0], str) - self.assertIn('geotiff', writer_names) + assert len(writer_names) > 0 + assert isinstance(writer_names[0], str) + assert "geotiff" in writer_names writer_infos = available_writers(as_dict=True) - self.assertEqual(len(writer_names), len(writer_infos)) - self.assertIsInstance(writer_infos[0], dict) + assert len(writer_names) == len(writer_infos) + assert isinstance(writer_infos[0], dict) for writer_info in writer_infos: - self.assertIn('name', writer_info) + assert "name" in writer_info class TestComputeWriterResults(unittest.TestCase): @@ -553,12 +551,12 @@ def setUp(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime(2018, 1, 1, 0, 0, 0)} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime(2018, 1, 1, 0, 0, 0)} ) self.scn = Scene() - self.scn['test'] = ds1 + self.scn["test"] = ds1 # Temp 
dir self.base_dir = tempfile.mkdtemp() @@ -578,23 +576,23 @@ def test_empty(self): def test_simple_image(self): """Test writing to PNG file.""" from satpy.writers import compute_writer_results - fname = os.path.join(self.base_dir, 'simple_image.png') + fname = os.path.join(self.base_dir, "simple_image.png") res = self.scn.save_datasets(filename=fname, - datasets=['test'], - writer='simple_image', + datasets=["test"], + writer="simple_image", compute=False) compute_writer_results([res]) - self.assertTrue(os.path.isfile(fname)) + assert os.path.isfile(fname) def test_geotiff(self): """Test writing to mitiff file.""" from satpy.writers import compute_writer_results - fname = os.path.join(self.base_dir, 'geotiff.tif') + fname = os.path.join(self.base_dir, "geotiff.tif") res = self.scn.save_datasets(filename=fname, - datasets=['test'], - writer='geotiff', compute=False) + datasets=["test"], + writer="geotiff", compute=False) compute_writer_results([res]) - self.assertTrue(os.path.isfile(fname)) + assert os.path.isfile(fname) # FIXME: This reader needs more information than exist at the moment # def test_mitiff(self): @@ -619,48 +617,48 @@ def test_geotiff(self): def test_multiple_geotiff(self): """Test writing to mitiff file.""" from satpy.writers import compute_writer_results - fname1 = os.path.join(self.base_dir, 'geotiff1.tif') + fname1 = os.path.join(self.base_dir, "geotiff1.tif") res1 = self.scn.save_datasets(filename=fname1, - datasets=['test'], - writer='geotiff', compute=False) - fname2 = os.path.join(self.base_dir, 'geotiff2.tif') + datasets=["test"], + writer="geotiff", compute=False) + fname2 = os.path.join(self.base_dir, "geotiff2.tif") res2 = self.scn.save_datasets(filename=fname2, - datasets=['test'], - writer='geotiff', compute=False) + datasets=["test"], + writer="geotiff", compute=False) compute_writer_results([res1, res2]) - self.assertTrue(os.path.isfile(fname1)) - self.assertTrue(os.path.isfile(fname2)) + assert os.path.isfile(fname1) + assert os.path.isfile(fname2) def test_multiple_simple(self): """Test writing to geotiff files.""" from satpy.writers import compute_writer_results - fname1 = os.path.join(self.base_dir, 'simple_image1.png') + fname1 = os.path.join(self.base_dir, "simple_image1.png") res1 = self.scn.save_datasets(filename=fname1, - datasets=['test'], - writer='simple_image', compute=False) - fname2 = os.path.join(self.base_dir, 'simple_image2.png') + datasets=["test"], + writer="simple_image", compute=False) + fname2 = os.path.join(self.base_dir, "simple_image2.png") res2 = self.scn.save_datasets(filename=fname2, - datasets=['test'], - writer='simple_image', compute=False) + datasets=["test"], + writer="simple_image", compute=False) compute_writer_results([res1, res2]) - self.assertTrue(os.path.isfile(fname1)) - self.assertTrue(os.path.isfile(fname2)) + assert os.path.isfile(fname1) + assert os.path.isfile(fname2) def test_mixed(self): """Test writing to multiple mixed-type files.""" from satpy.writers import compute_writer_results - fname1 = os.path.join(self.base_dir, 'simple_image3.png') + fname1 = os.path.join(self.base_dir, "simple_image3.png") res1 = self.scn.save_datasets(filename=fname1, - datasets=['test'], - writer='simple_image', compute=False) - fname2 = os.path.join(self.base_dir, 'geotiff3.tif') + datasets=["test"], + writer="simple_image", compute=False) + fname2 = os.path.join(self.base_dir, "geotiff3.tif") res2 = self.scn.save_datasets(filename=fname2, - datasets=['test'], - writer='geotiff', compute=False) + datasets=["test"], + 
writer="geotiff", compute=False) res3 = [] compute_writer_results([res1, res2, res3]) - self.assertTrue(os.path.isfile(fname1)) - self.assertTrue(os.path.isfile(fname2)) + assert os.path.isfile(fname1) + assert os.path.isfile(fname2) class TestBaseWriter: @@ -675,18 +673,18 @@ def setup_method(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), + dims=("y", "x"), attrs={ - 'name': 'test', - 'start_time': datetime(2018, 1, 1, 0, 0, 0), - 'sensor': 'fake_sensor', + "name": "test", + "start_time": datetime(2018, 1, 1, 0, 0, 0), + "sensor": "fake_sensor", } ) ds2 = ds1.copy() - ds2.attrs['sensor'] = {'fake_sensor1', 'fake_sensor2'} + ds2.attrs["sensor"] = {"fake_sensor1", "fake_sensor2"} self.scn = Scene() - self.scn['test'] = ds1 - self.scn['test2'] = ds2 + self.scn["test"] = ds1 + self.scn["test2"] = ds2 # Temp dir self.base_dir = tempfile.mkdtemp() @@ -700,16 +698,16 @@ def teardown_method(self): def test_save_dataset_static_filename(self): """Test saving a dataset with a static filename specified.""" - self.scn.save_datasets(base_dir=self.base_dir, filename='geotiff.tif') - assert os.path.isfile(os.path.join(self.base_dir, 'geotiff.tif')) + self.scn.save_datasets(base_dir=self.base_dir, filename="geotiff.tif") + assert os.path.isfile(os.path.join(self.base_dir, "geotiff.tif")) @pytest.mark.parametrize( - ('fmt_fn', 'exp_fns'), + ("fmt_fn", "exp_fns"), [ - ('geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif', - ['geotiff_test_20180101_000000.tif', 'geotiff_test2_20180101_000000.tif']), - ('geotiff_{name}_{sensor}.tif', - ['geotiff_test_fake_sensor.tif', 'geotiff_test2_fake_sensor1-fake_sensor2.tif']), + ("geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif", + ["geotiff_test_20180101_000000.tif", "geotiff_test2_20180101_000000.tif"]), + ("geotiff_{name}_{sensor}.tif", + ["geotiff_test_fake_sensor.tif", "geotiff_test2_fake_sensor1-fake_sensor2.tif"]), ] ) def test_save_dataset_dynamic_filename(self, fmt_fn, exp_fns): @@ -721,14 +719,14 @@ def test_save_dataset_dynamic_filename(self, fmt_fn, exp_fns): def test_save_dataset_dynamic_filename_with_dir(self): """Test saving a dataset with a format filename that includes a directory.""" - fmt_fn = os.path.join('{start_time:%Y%m%d}', 'geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif') - exp_fn = os.path.join('20180101', 'geotiff_test_20180101_000000.tif') + fmt_fn = os.path.join("{start_time:%Y%m%d}", "geotiff_{name}_{start_time:%Y%m%d_%H%M%S}.tif") + exp_fn = os.path.join("20180101", "geotiff_test_20180101_000000.tif") self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn) assert os.path.isfile(os.path.join(self.base_dir, exp_fn)) # change the filename pattern but keep the same directory - fmt_fn2 = os.path.join('{start_time:%Y%m%d}', 'geotiff_{name}_{start_time:%Y%m%d_%H}.tif') - exp_fn2 = os.path.join('20180101', 'geotiff_test_20180101_00.tif') + fmt_fn2 = os.path.join("{start_time:%Y%m%d}", "geotiff_{name}_{start_time:%Y%m%d_%H}.tif") + exp_fn2 = os.path.join("20180101", "geotiff_test_20180101_00.tif") self.scn.save_datasets(base_dir=self.base_dir, filename=fmt_fn2) assert os.path.isfile(os.path.join(self.base_dir, exp_fn2)) # the original file should still exist @@ -743,53 +741,53 @@ def setUp(self): from pyresample.geometry import AreaDefinition from trollimage.xrimage import XRImage - proj_dict = {'proj': 'lcc', 'datum': 'WGS84', 'ellps': 'WGS84', - 'lon_0': -95., 'lat_0': 25, 'lat_1': 25, - 'units': 'm', 'no_defs': True} + proj_dict = {"proj": "lcc", "datum": "WGS84", "ellps": "WGS84", + "lon_0": -95., "lat_0": 25, 
"lat_1": 25, + "units": "m", "no_defs": True} self.area_def = AreaDefinition( - 'test', 'test', 'test', proj_dict, + "test", "test", "test", proj_dict, 200, 400, (-1000., -1500., 1000., 1500.), ) self.orig_rgb_img = XRImage( xr.DataArray(da.arange(75., chunks=10).reshape(3, 5, 5) / 75., - dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}, - attrs={'name': 'test_ds', 'area': self.area_def}) + dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}, + attrs={"name": "test_ds", "area": self.area_def}) ) self.orig_l_img = XRImage( xr.DataArray(da.arange(25., chunks=10).reshape(5, 5) / 75., - dims=('y', 'x'), - attrs={'name': 'test_ds', 'area': self.area_def}) + dims=("y", "x"), + attrs={"name": "test_ds", "area": self.area_def}) ) self.decorate = { - 'decorate': [ - {'logo': {'logo_path': '', 'height': 143, 'bg': 'white', 'bg_opacity': 255}}, - {'text': { - 'txt': 'TEST', - 'align': {'top_bottom': 'bottom', 'left_right': 'right'}, - 'font': '', - 'font_size': 22, - 'height': 30, - 'bg': 'black', - 'bg_opacity': 255, - 'line': 'white'}}, - {'scale': { - 'colormap': greys, - 'extend': False, - 'width': 1670, 'height': 110, - 'tick_marks': 5, 'minor_tick_marks': 1, - 'cursor': [0, 0], 'bg': 'white', - 'title': 'TEST TITLE OF SCALE', - 'fontsize': 110, 'align': 'cc' + "decorate": [ + {"logo": {"logo_path": "", "height": 143, "bg": "white", "bg_opacity": 255}}, + {"text": { + "txt": "TEST", + "align": {"top_bottom": "bottom", "left_right": "right"}, + "font": "", + "font_size": 22, + "height": 30, + "bg": "black", + "bg_opacity": 255, + "line": "white"}}, + {"scale": { + "colormap": greys, + "extend": False, + "width": 1670, "height": 110, + "tick_marks": 5, "minor_tick_marks": 1, + "cursor": [0, 0], "bg": "white", + "title": "TEST TITLE OF SCALE", + "fontsize": 110, "align": "cc" }} ] } import_mock = mock.MagicMock() - modules = {'pycoast': import_mock.pycoast, - 'pydecorate': import_mock.pydecorate} - self.module_patcher = mock.patch.dict('sys.modules', modules) + modules = {"pycoast": import_mock.pycoast, + "pydecorate": import_mock.pydecorate} + self.module_patcher = mock.patch.dict("sys.modules", modules) self.module_patcher.start() def tearDown(self): @@ -801,21 +799,21 @@ def test_add_overlay_basic_rgb(self): from pycoast import ContourWriterAGG from satpy.writers import _burn_overlay, add_overlay - coast_dir = '/path/to/coast/data' + coast_dir = "/path/to/coast/data" with mock.patch.object(self.orig_rgb_img, "apply_pil") as apply_pil: apply_pil.return_value = self.orig_rgb_img new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, fill_value=0) - self.assertEqual(self.orig_rgb_img.mode, new_img.mode) + assert self.orig_rgb_img.mode == new_img.mode new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir) - self.assertEqual(self.orig_rgb_img.mode + 'A', new_img.mode) + assert self.orig_rgb_img.mode + "A" == new_img.mode with mock.patch.object(self.orig_rgb_img, "convert") as convert: convert.return_value = self.orig_rgb_img - overlays = {'coasts': {'outline': 'red'}} + overlays = {"coasts": {"outline": "red"}} new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, overlays=overlays, fill_value=0) pil_args = None - pil_kwargs = {'fill_value': 0} + pil_kwargs = {"fill_value": 0} fun_args = (self.orig_rgb_img.data.area, ContourWriterAGG.return_value, overlays) fun_kwargs = None apply_pil.assert_called_with(_burn_overlay, self.orig_rgb_img.mode, @@ -824,11 +822,11 @@ def test_add_overlay_basic_rgb(self): # test legacy call - grid = 
{'minor_is_tick': True} - color = 'red' - expected_overlays = {'coasts': {'outline': color, 'width': 0.5, 'level': 1}, - 'borders': {'outline': color, 'width': 0.5, 'level': 1}, - 'grid': grid} + grid = {"minor_is_tick": True} + color = "red" + expected_overlays = {"coasts": {"outline": color, "width": 0.5, "level": 1}, + "borders": {"outline": color, "width": 0.5, "level": 1}, + "grid": grid} with warnings.catch_warnings(record=True) as wns: warnings.simplefilter("always") new_img = add_overlay(self.orig_rgb_img, self.area_def, coast_dir, @@ -838,7 +836,7 @@ def test_add_overlay_basic_rgb(self): assert "deprecated" in str(wns[0].message) pil_args = None - pil_kwargs = {'fill_value': 0} + pil_kwargs = {"fill_value": 0} fun_args = (self.orig_rgb_img.data.area, ContourWriterAGG.return_value, expected_overlays) fun_kwargs = None apply_pil.assert_called_with(_burn_overlay, self.orig_rgb_img.mode, @@ -848,22 +846,22 @@ def test_add_overlay_basic_rgb(self): def test_add_overlay_basic_l(self): """Test basic add_overlay usage with L data.""" from satpy.writers import add_overlay - new_img = add_overlay(self.orig_l_img, self.area_def, '', fill_value=0) - self.assertEqual('RGB', new_img.mode) - new_img = add_overlay(self.orig_l_img, self.area_def, '') - self.assertEqual('RGBA', new_img.mode) + new_img = add_overlay(self.orig_l_img, self.area_def, "", fill_value=0) + assert "RGB" == new_img.mode + new_img = add_overlay(self.orig_l_img, self.area_def, "") + assert "RGBA" == new_img.mode def test_add_decorate_basic_rgb(self): """Test basic add_decorate usage with RGB data.""" from satpy.writers import add_decorate new_img = add_decorate(self.orig_rgb_img, **self.decorate) - self.assertEqual('RGBA', new_img.mode) + assert "RGBA" == new_img.mode def test_add_decorate_basic_l(self): """Test basic add_decorate usage with L data.""" from satpy.writers import add_decorate new_img = add_decorate(self.orig_l_img, **self.decorate) - self.assertEqual('RGBA', new_img.mode) + assert "RGBA" == new_img.mode def test_group_results_by_output_file(tmp_path): diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py index 0d2e057f32..35752cd237 100644 --- a/satpy/tests/test_yaml_reader.py +++ b/satpy/tests/test_yaml_reader.py @@ -37,47 +37,47 @@ from satpy.tests.utils import make_dataid MHS_YAML_READER_DICT = { - 'reader': {'name': 'mhs_l1c_aapp', - 'description': 'AAPP l1c Reader for AMSU-B/MHS data', - 'sensors': ['mhs'], - 'default_channels': [1, 2, 3, 4, 5], - 'data_identification_keys': {'name': {'required': True}, - 'frequency_double_sideband': - {'type': FrequencyDoubleSideBand}, - 'frequency_range': {'type': FrequencyRange}, - 'resolution': None, - 'polarization': {'enum': ['H', 'V']}, - 'calibration': {'enum': ['brightness_temperature'], 'transitive': True}, - 'modifiers': {'required': True, - 'default': [], - 'type': ModifierTuple}}, - 'config_files': ('satpy/etc/readers/mhs_l1c_aapp.yaml',)}, - 'datasets': {'1': {'name': '1', - 'frequency_range': {'central': 89.0, 'bandwidth': 2.8, 'unit': 'GHz'}, - 'polarization': 'V', - 'resolution': 16000, - 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}}, - 'coordinates': ['longitude', 'latitude'], - 'file_type': 'mhs_aapp_l1c'}, - '2': {'name': '2', - 'frequency_range': {'central': 157.0, 'bandwidth': 2.8, 'unit': 'GHz'}, - 'polarization': 'V', - 'resolution': 16000, - 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}}, - 'coordinates': ['longitude', 'latitude'], - 
'file_type': 'mhs_aapp_l1c'}, - '3': {'name': '3', - 'frequency_double_sideband': {'unit': 'GHz', - 'central': 183.31, - 'side': 1.0, - 'bandwidth': 1.0}, - 'polarization': 'V', - 'resolution': 16000, - 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}}, - 'coordinates': ['longitude', 'latitude'], - 'file_type': 'mhs_aapp_l1c'}}, - 'file_types': {'mhs_aapp_l1c': {'file_reader': BaseFileHandler, - 'file_patterns': [ + "reader": {"name": "mhs_l1c_aapp", + "description": "AAPP l1c Reader for AMSU-B/MHS data", + "sensors": ["mhs"], + "default_channels": [1, 2, 3, 4, 5], + "data_identification_keys": {"name": {"required": True}, + "frequency_double_sideband": + {"type": FrequencyDoubleSideBand}, + "frequency_range": {"type": FrequencyRange}, + "resolution": None, + "polarization": {"enum": ["H", "V"]}, + "calibration": {"enum": ["brightness_temperature"], "transitive": True}, + "modifiers": {"required": True, + "default": [], + "type": ModifierTuple}}, + "config_files": ("satpy/etc/readers/mhs_l1c_aapp.yaml",)}, + "datasets": {"1": {"name": "1", + "frequency_range": {"central": 89.0, "bandwidth": 2.8, "unit": "GHz"}, + "polarization": "V", + "resolution": 16000, + "calibration": {"brightness_temperature": {"standard_name": "toa_brightness_temperature"}}, + "coordinates": ["longitude", "latitude"], + "file_type": "mhs_aapp_l1c"}, + "2": {"name": "2", + "frequency_range": {"central": 157.0, "bandwidth": 2.8, "unit": "GHz"}, + "polarization": "V", + "resolution": 16000, + "calibration": {"brightness_temperature": {"standard_name": "toa_brightness_temperature"}}, + "coordinates": ["longitude", "latitude"], + "file_type": "mhs_aapp_l1c"}, + "3": {"name": "3", + "frequency_double_sideband": {"unit": "GHz", + "central": 183.31, + "side": 1.0, + "bandwidth": 1.0}, + "polarization": "V", + "resolution": 16000, + "calibration": {"brightness_temperature": {"standard_name": "toa_brightness_temperature"}}, + "coordinates": ["longitude", "latitude"], + "file_type": "mhs_aapp_l1c"}}, + "file_types": {"mhs_aapp_l1c": {"file_reader": BaseFileHandler, + "file_patterns": [ 'mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c']}}} # noqa @@ -91,7 +91,7 @@ def __init__(self, start_time, end_time): self._end_time = end_time self.get_bounding_box = MagicMock() fake_ds = MagicMock() - fake_ds.return_value.dims = ['x', 'y'] + fake_ds.return_value.dims = ["x", "y"] self.get_dataset = fake_ds self.combine_info = MagicMock() @@ -111,39 +111,39 @@ class TestUtils(unittest.TestCase): def test_get_filebase(self): """Check the get_filebase function.""" - base_dir = os.path.join(os.path.expanduser('~'), 'data', - 'satellite', 'Sentinel-3') - base_data = ('S3A_OL_1_EFR____20161020T081224_20161020T081524_' - '20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3') + base_dir = os.path.join(os.path.expanduser("~"), "data", + "satellite", "Sentinel-3") + base_data = ("S3A_OL_1_EFR____20161020T081224_20161020T081524_" + "20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3") base_dir = os.path.join(base_dir, base_data) - pattern = ('{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s' - '}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre' - 'ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati' - 've_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:' - '2s}_{collection:3s}.SEN3/geo_coordinates.nc') - pattern = os.path.join(*pattern.split('/')) - filename = os.path.join(base_dir, 'Oa05_radiance.nc') - expected = os.path.join(base_data, 
'Oa05_radiance.nc') - self.assertEqual(yr._get_filebase(filename, pattern), expected) + pattern = ("{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s" + "}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre" + "ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati" + "ve_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:" + "2s}_{collection:3s}.SEN3/geo_coordinates.nc") + pattern = os.path.join(*pattern.split("/")) + filename = os.path.join(base_dir, "Oa05_radiance.nc") + expected = os.path.join(base_data, "Oa05_radiance.nc") + assert yr._get_filebase(filename, pattern) == expected def test_match_filenames(self): """Check that matching filenames works.""" # just a fake path for testing that doesn't have to exist - base_dir = os.path.join(os.path.expanduser('~'), 'data', - 'satellite', 'Sentinel-3') - base_data = ('S3A_OL_1_EFR____20161020T081224_20161020T081524_' - '20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3') + base_dir = os.path.join(os.path.expanduser("~"), "data", + "satellite", "Sentinel-3") + base_data = ("S3A_OL_1_EFR____20161020T081224_20161020T081524_" + "20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3") base_dir = os.path.join(base_dir, base_data) - pattern = ('{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s' - '}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre' - 'ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati' - 've_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:' - '2s}_{collection:3s}.SEN3/geo_coordinates.nc') - pattern = os.path.join(*pattern.split('/')) - filenames = [os.path.join(base_dir, 'Oa05_radiance.nc'), - os.path.join(base_dir, 'geo_coordinates.nc')] - expected = os.path.join(base_dir, 'geo_coordinates.nc') - self.assertEqual(yr._match_filenames(filenames, pattern), {expected}) + pattern = ("{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s" + "}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre" + "ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati" + "ve_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:" + "2s}_{collection:3s}.SEN3/geo_coordinates.nc") + pattern = os.path.join(*pattern.split("/")) + filenames = [os.path.join(base_dir, "Oa05_radiance.nc"), + os.path.join(base_dir, "geo_coordinates.nc")] + expected = os.path.join(base_dir, "geo_coordinates.nc") + assert yr._match_filenames(filenames, pattern) == {expected} def test_match_filenames_windows_forward_slash(self): """Check that matching filenames works on Windows with forward slashes. 
@@ -152,28 +152,27 @@ def test_match_filenames_windows_forward_slash(self): """ # just a fake path for testing that doesn't have to exist - base_dir = os.path.join(os.path.expanduser('~'), 'data', - 'satellite', 'Sentinel-3') - base_data = ('S3A_OL_1_EFR____20161020T081224_20161020T081524_' - '20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3') + base_dir = os.path.join(os.path.expanduser("~"), "data", + "satellite", "Sentinel-3") + base_data = ("S3A_OL_1_EFR____20161020T081224_20161020T081524_" + "20161020T102406_0179_010_078_2340_SVL_O_NR_002.SEN3") base_dir = os.path.join(base_dir, base_data) - pattern = ('{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s' - '}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre' - 'ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati' - 've_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:' - '2s}_{collection:3s}.SEN3/geo_coordinates.nc') - pattern = os.path.join(*pattern.split('/')) - filenames = [os.path.join(base_dir, 'Oa05_radiance.nc').replace(os.sep, '/'), - os.path.join(base_dir, 'geo_coordinates.nc').replace(os.sep, '/')] - expected = os.path.join(base_dir, 'geo_coordinates.nc').replace(os.sep, '/') - self.assertEqual(yr._match_filenames(filenames, pattern), {expected}) + pattern = ("{mission_id:3s}_OL_{processing_level:1s}_{datatype_id:_<6s" + "}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{cre" + "ation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relati" + "ve_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:" + "2s}_{collection:3s}.SEN3/geo_coordinates.nc") + pattern = os.path.join(*pattern.split("/")) + filenames = [os.path.join(base_dir, "Oa05_radiance.nc").replace(os.sep, "/"), + os.path.join(base_dir, "geo_coordinates.nc").replace(os.sep, "/")] + expected = os.path.join(base_dir, "geo_coordinates.nc").replace(os.sep, "/") + assert yr._match_filenames(filenames, pattern) == {expected} def test_listify_string(self): """Check listify_string.""" - self.assertEqual(yr.listify_string(None), []) - self.assertEqual(yr.listify_string('some string'), ['some string']) - self.assertEqual(yr.listify_string(['some', 'string']), - ['some', 'string']) + assert yr.listify_string(None) == [] + assert yr.listify_string("some string") == ["some string"] + assert yr.listify_string(["some", "string"]) == ["some", "string"] class DummyReader(BaseFileHandler): @@ -203,59 +202,59 @@ class TestFileFileYAMLReaderMultiplePatterns(unittest.TestCase): def setUp(self): """Prepare a reader instance with a fake config.""" - patterns = ['a{something:3s}.bla', - 'a0{something:2s}.bla'] - res_dict = {'reader': {'name': 'fake', - 'sensors': ['canon']}, - 'file_types': {'ftype1': {'name': 'ft1', - 'file_patterns': patterns, - 'file_reader': DummyReader}}, - 'datasets': {'ch1': {'name': 'ch01', - 'wavelength': [0.5, 0.6, 0.7], - 'calibration': 'reflectance', - 'file_type': 'ftype1', - 'coordinates': ['lons', 'lats']}, - 'ch2': {'name': 'ch02', - 'wavelength': [0.7, 0.75, 0.8], - 'calibration': 'counts', - 'file_type': 'ftype1', - 'coordinates': ['lons', 'lats']}, - 'lons': {'name': 'lons', - 'file_type': 'ftype2'}, - 'lats': {'name': 'lats', - 'file_type': 'ftype2'}}} + patterns = ["a{something:3s}.bla", + "a0{something:2s}.bla"] + res_dict = {"reader": {"name": "fake", + "sensors": ["canon"]}, + "file_types": {"ftype1": {"name": "ft1", + "file_patterns": patterns, + "file_reader": DummyReader}}, + "datasets": {"ch1": {"name": "ch01", + "wavelength": [0.5, 0.6, 0.7], + "calibration": "reflectance", + "file_type": "ftype1", + 
"coordinates": ["lons", "lats"]}, + "ch2": {"name": "ch02", + "wavelength": [0.7, 0.75, 0.8], + "calibration": "counts", + "file_type": "ftype1", + "coordinates": ["lons", "lats"]}, + "lons": {"name": "lons", + "file_type": "ftype2"}, + "lats": {"name": "lats", + "file_type": "ftype2"}}} self.config = res_dict self.reader = yr.FileYAMLReader(self.config, filter_parameters={ - 'start_time': datetime(2000, 1, 1), - 'end_time': datetime(2000, 1, 2)}) + "start_time": datetime(2000, 1, 1), + "end_time": datetime(2000, 1, 2)}) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" - filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] + filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] res = self.reader.select_files_from_pathnames(filelist) - for expected in ['a001.bla', 'a002.bla', 'abcd.bla']: - self.assertIn(expected, res) - self.assertEqual(len(res), 3) + for expected in ["a001.bla", "a002.bla", "abcd.bla"]: + assert expected in res + assert len(res) == 3 def test_fn_items_for_ft(self): """Check filename_items_for_filetype.""" - filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] - ft_info = self.config['file_types']['ftype1'] + filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] + ft_info = self.config["file_types"]["ftype1"] fiter = self.reader.filename_items_for_filetype(filelist, ft_info) filenames = dict(fname for fname in fiter) - self.assertEqual(len(filenames.keys()), 3) + assert len(filenames.keys()) == 3 def test_create_filehandlers(self): """Check create_filehandlers.""" - filelist = ['a001.bla', 'a002.bla', 'a001.bla', 'a002.bla', - 'abcd.bla', 'k001.bla', 'a003.bli'] + filelist = ["a001.bla", "a002.bla", "a001.bla", "a002.bla", + "abcd.bla", "k001.bla", "a003.bli"] self.reader.create_filehandlers(filelist) - self.assertEqual(len(self.reader.file_handlers['ftype1']), 3) + assert len(self.reader.file_handlers["ftype1"]) == 3 def test_serializable(self): """Check that a reader is serializable by dask. @@ -264,8 +263,8 @@ def test_serializable(self): readers. 
""" from distributed.protocol import deserialize, serialize - filelist = ['a001.bla', 'a002.bla', 'a001.bla', 'a002.bla', - 'abcd.bla', 'k001.bla', 'a003.bli'] + filelist = ["a001.bla", "a002.bla", "a001.bla", "a002.bla", + "abcd.bla", "k001.bla", "a003.bli"] self.reader.create_filehandlers(filelist) cloned_reader = deserialize(*serialize(self.reader)) @@ -281,8 +280,8 @@ def setUp(self): self.config = MHS_YAML_READER_DICT self.reader = yr.FileYAMLReader(MHS_YAML_READER_DICT, filter_parameters={ - 'start_time': datetime(2000, 1, 1), - 'end_time': datetime(2000, 1, 2), + "start_time": datetime(2000, 1, 1), + "end_time": datetime(2000, 1, 2), }) def test_custom_type_with_dict_contents_gets_parsed_correctly(self): @@ -298,73 +297,66 @@ class TestFileFileYAMLReader(unittest.TestCase): def setUp(self): """Prepare a reader instance with a fake config.""" - patterns = ['a{something:3s}.bla'] - res_dict = {'reader': {'name': 'fake', - 'sensors': ['canon']}, - 'file_types': {'ftype1': {'name': 'ft1', - 'file_reader': BaseFileHandler, - 'file_patterns': patterns}}, - 'datasets': {'ch1': {'name': 'ch01', - 'wavelength': [0.5, 0.6, 0.7], - 'calibration': 'reflectance', - 'file_type': 'ftype1', - 'coordinates': ['lons', 'lats']}, - 'ch2': {'name': 'ch02', - 'wavelength': [0.7, 0.75, 0.8], - 'calibration': 'counts', - 'file_type': 'ftype1', - 'coordinates': ['lons', 'lats']}, - 'lons': {'name': 'lons', - 'file_type': 'ftype2'}, - 'lats': {'name': 'lats', - 'file_type': 'ftype2'}}} + patterns = ["a{something:3s}.bla"] + res_dict = {"reader": {"name": "fake", + "sensors": ["canon"]}, + "file_types": {"ftype1": {"name": "ft1", + "file_reader": BaseFileHandler, + "file_patterns": patterns}}, + "datasets": {"ch1": {"name": "ch01", + "wavelength": [0.5, 0.6, 0.7], + "calibration": "reflectance", + "file_type": "ftype1", + "coordinates": ["lons", "lats"]}, + "ch2": {"name": "ch02", + "wavelength": [0.7, 0.75, 0.8], + "calibration": "counts", + "file_type": "ftype1", + "coordinates": ["lons", "lats"]}, + "lons": {"name": "lons", + "file_type": "ftype2"}, + "lats": {"name": "lats", + "file_type": "ftype2"}}} self.config = res_dict self.reader = yr.FileYAMLReader(res_dict, filter_parameters={ - 'start_time': datetime(2000, 1, 1), - 'end_time': datetime(2000, 1, 2), + "start_time": datetime(2000, 1, 1), + "end_time": datetime(2000, 1, 2), }) def test_deprecated_passing_config_files(self): """Test that we get an exception when config files are passed to inti.""" - self.assertRaises(ValueError, yr.FileYAMLReader, '/path/to/some/file.yaml') + self.assertRaises(ValueError, yr.FileYAMLReader, "/path/to/some/file.yaml") def test_all_data_ids(self): """Check that all datasets ids are returned.""" for dataid in self.reader.all_dataset_ids: - name = dataid['name'].replace('0', '') - assert self.config['datasets'][name]['name'] == dataid['name'] - if 'wavelength' in self.config['datasets'][name]: - assert self.config['datasets'][name]['wavelength'] == list(dataid['wavelength'])[:3] - if 'calibration' in self.config['datasets'][name]: - assert self.config['datasets'][name]['calibration'] == dataid['calibration'] + name = dataid["name"].replace("0", "") + assert self.config["datasets"][name]["name"] == dataid["name"] + if "wavelength" in self.config["datasets"][name]: + assert self.config["datasets"][name]["wavelength"] == list(dataid["wavelength"])[:3] + if "calibration" in self.config["datasets"][name]: + assert self.config["datasets"][name]["calibration"] == dataid["calibration"] def test_all_dataset_names(self): """Get 
all dataset names.""" - self.assertSetEqual(self.reader.all_dataset_names, - set(['ch01', 'ch02', 'lons', 'lats'])) + assert self.reader.all_dataset_names == set(["ch01", "ch02", "lons", "lats"]) def test_available_dataset_ids(self): """Get ids of the available datasets.""" - loadables = self.reader.select_files_from_pathnames(['a001.bla']) + loadables = self.reader.select_files_from_pathnames(["a001.bla"]) self.reader.create_filehandlers(loadables) - self.assertSetEqual(set(self.reader.available_dataset_ids), - {make_dataid(name='ch02', - wavelength=(0.7, 0.75, 0.8), - calibration='counts', - modifiers=()), - make_dataid(name='ch01', - wavelength=(0.5, 0.6, 0.7), - calibration='reflectance', - modifiers=())}) + assert set(self.reader.available_dataset_ids) == {make_dataid(name="ch02", wavelength=(0.7, 0.75, 0.8), + calibration="counts", modifiers=()), + make_dataid(name="ch01", wavelength=(0.5, 0.6, 0.7), + calibration="reflectance", modifiers=())} def test_available_dataset_names(self): """Get ids of the available datasets.""" - loadables = self.reader.select_files_from_pathnames(['a001.bla']) + loadables = self.reader.select_files_from_pathnames(["a001.bla"]) self.reader.create_filehandlers(loadables) - self.assertSetEqual(set(self.reader.available_dataset_names), - set(["ch01", "ch02"])) + assert set(self.reader.available_dataset_names) == set(["ch01", "ch02"]) def test_filter_fh_by_time(self): """Check filtering filehandlers by time.""" @@ -383,35 +375,35 @@ def test_filter_fh_by_time(self): for idx, fh in enumerate([fh0, fh1, fh2, fh3, fh4, fh5]): res = self.reader.time_matches(fh.start_time, fh.end_time) # only the first one should be false - self.assertEqual(res, idx not in [0, 4]) + assert res == (idx not in [0, 4]) for idx, fh in enumerate([fh0, fh1, fh2, fh3, fh4, fh5]): res = self.reader.time_matches(fh.start_time, None) - self.assertEqual(res, idx not in [0, 1, 4, 5]) + assert res == (idx not in [0, 1, 4, 5]) - @patch('satpy.readers.yaml_reader.get_area_def') - @patch('satpy.readers.yaml_reader.AreaDefBoundary') - @patch('satpy.readers.yaml_reader.Boundary') + @patch("satpy.readers.yaml_reader.get_area_def") + @patch("satpy.readers.yaml_reader.AreaDefBoundary") + @patch("satpy.readers.yaml_reader.Boundary") def test_file_covers_area(self, bnd, adb, gad): """Test that area coverage is checked properly.""" file_handler = FakeFH(datetime(1999, 12, 31, 10, 0), datetime(2000, 1, 3, 12, 30)) - self.reader.filter_parameters['area'] = True + self.reader.filter_parameters["area"] = True bnd.return_value.contour_poly.intersection.return_value = True adb.return_value.contour_poly.intersection.return_value = True res = self.reader.check_file_covers_area(file_handler, True) - self.assertTrue(res) + assert res bnd.return_value.contour_poly.intersection.return_value = False adb.return_value.contour_poly.intersection.return_value = False res = self.reader.check_file_covers_area(file_handler, True) - self.assertFalse(res) + assert not res file_handler.get_bounding_box.side_effect = NotImplementedError() - self.reader.filter_parameters['area'] = True + self.reader.filter_parameters["area"] = True res = self.reader.check_file_covers_area(file_handler, True) - self.assertTrue(res) + assert res def test_start_end_time(self): """Check start and end time behaviours.""" @@ -441,40 +433,39 @@ def get_end_time(): datetime(2000, 1, 3, 12, 30)) self.reader.file_handlers = { - '0': [fh1, fh2, fh3, fh4, fh5], - '1': [fh0, fh1, fh2, fh3, fh4, fh5], - '2': [fh2, fh3], + "0": [fh1, fh2, fh3, fh4, fh5], + "1": 
[fh0, fh1, fh2, fh3, fh4, fh5], + "2": [fh2, fh3], } - self.assertEqual(self.reader.start_time, datetime(1999, 12, 30, 0, 0)) - self.assertEqual(self.reader.end_time, datetime(2000, 1, 3, 12, 30)) + assert self.reader.start_time == datetime(1999, 12, 30, 0, 0) + assert self.reader.end_time == datetime(2000, 1, 3, 12, 30) def test_select_from_pathnames(self): """Check select_files_from_pathnames.""" - filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] + filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] res = self.reader.select_files_from_pathnames(filelist) - for expected in ['a001.bla', 'a002.bla', 'abcd.bla']: - self.assertIn(expected, res) + for expected in ["a001.bla", "a002.bla", "abcd.bla"]: + assert expected in res - self.assertEqual(0, len(self.reader.select_files_from_pathnames([]))) + assert 0 == len(self.reader.select_files_from_pathnames([])) def test_select_from_directory(self): """Check select_files_from_directory.""" - filelist = ['a001.bla', 'a002.bla', 'abcd.bla', 'k001.bla', 'a003.bli'] + filelist = ["a001.bla", "a002.bla", "abcd.bla", "k001.bla", "a003.bli"] dpath = mkdtemp() for fname in filelist: - with open(os.path.join(dpath, fname), 'w'): + with open(os.path.join(dpath, fname), "w"): pass res = self.reader.select_files_from_directory(dpath) - for expected in ['a001.bla', 'a002.bla', 'abcd.bla']: - self.assertIn(os.path.join(dpath, expected), res) + for expected in ["a001.bla", "a002.bla", "abcd.bla"]: + assert os.path.join(dpath, expected) in res for fname in filelist: os.remove(os.path.join(dpath, fname)) - self.assertEqual(0, - len(self.reader.select_files_from_directory(dpath))) + assert 0 == len(self.reader.select_files_from_directory(dpath)) os.rmdir(dpath) from fsspec.implementations.local import LocalFileSystem @@ -484,16 +475,14 @@ def glob(self, pattern): return ["/grocery/apricot.nc", "/grocery/aubergine.nc"] res = self.reader.select_files_from_directory(dpath, fs=Silly()) - self.assertEqual( - res, - {"/grocery/apricot.nc", "/grocery/aubergine.nc"}) + assert res == {"/grocery/apricot.nc", "/grocery/aubergine.nc"} def test_supports_sensor(self): """Check supports_sensor.""" - self.assertTrue(self.reader.supports_sensor('canon')) - self.assertFalse(self.reader.supports_sensor('nikon')) + assert self.reader.supports_sensor("canon") + assert not self.reader.supports_sensor("nikon") - @patch('satpy.readers.yaml_reader.StackedAreaDefinition') + @patch("satpy.readers.yaml_reader.StackedAreaDefinition") def test_load_area_def(self, sad): """Test loading the area def for the reader.""" dataid = MagicMock() @@ -502,61 +491,59 @@ def test_load_area_def(self, sad): for _i in range(items): file_handlers.append(MagicMock()) final_area = self.reader._load_area_def(dataid, file_handlers) - self.assertEqual(final_area, sad.return_value.squeeze.return_value) + assert final_area == sad.return_value.squeeze.return_value args, kwargs = sad.call_args - self.assertEqual(len(args), items) + assert len(args) == items def test_preferred_filetype(self): """Test finding the preferred filetype.""" - self.reader.file_handlers = {'a': 'a', 'b': 'b', 'c': 'c'} - self.assertEqual(self.reader._preferred_filetype(['c', 'a']), 'c') - self.assertEqual(self.reader._preferred_filetype(['a', 'c']), 'a') - self.assertEqual(self.reader._preferred_filetype(['d', 'e']), None) + self.reader.file_handlers = {"a": "a", "b": "b", "c": "c"} + assert self.reader._preferred_filetype(["c", "a"]) == "c" + assert self.reader._preferred_filetype(["a", "c"]) == "a" + 
assert self.reader._preferred_filetype(["d", "e"]) is None def test_get_coordinates_for_dataset_key(self): """Test getting coordinates for a key.""" - ds_q = DataQuery(name='ch01', wavelength=(0.5, 0.6, 0.7, 'µm'), - calibration='reflectance', modifiers=()) + ds_q = DataQuery(name="ch01", wavelength=(0.5, 0.6, 0.7, "µm"), + calibration="reflectance", modifiers=()) res = self.reader._get_coordinates_for_dataset_key(ds_q) - self.assertListEqual(res, - [make_dataid(name='lons'), - make_dataid(name='lats')]) + assert res == [make_dataid(name="lons"), make_dataid(name="lats")] def test_get_coordinates_for_dataset_key_without(self): """Test getting coordinates for a key without coordinates.""" - ds_id = make_dataid(name='lons', + ds_id = make_dataid(name="lons", modifiers=()) res = self.reader._get_coordinates_for_dataset_key(ds_id) - self.assertListEqual(res, []) + assert res == [] def test_get_coordinates_for_dataset_keys(self): """Test getting coordinates for keys.""" - ds_id1 = make_dataid(name='ch01', wavelength=(0.5, 0.6, 0.7), - calibration='reflectance', modifiers=()) - ds_id2 = make_dataid(name='ch02', wavelength=(0.7, 0.75, 0.8), - calibration='counts', modifiers=()) - lons = make_dataid(name='lons', modifiers=()) - lats = make_dataid(name='lats', modifiers=()) + ds_id1 = make_dataid(name="ch01", wavelength=(0.5, 0.6, 0.7), + calibration="reflectance", modifiers=()) + ds_id2 = make_dataid(name="ch02", wavelength=(0.7, 0.75, 0.8), + calibration="counts", modifiers=()) + lons = make_dataid(name="lons", modifiers=()) + lats = make_dataid(name="lats", modifiers=()) res = self.reader._get_coordinates_for_dataset_keys([ds_id1, ds_id2, lons]) expected = {ds_id1: [lons, lats], ds_id2: [lons, lats], lons: []} - self.assertDictEqual(res, expected) + assert res == expected def test_get_file_handlers(self): """Test getting filehandler to load a dataset.""" - ds_id1 = make_dataid(name='ch01', wavelength=(0.5, 0.6, 0.7), - calibration='reflectance', modifiers=()) - self.reader.file_handlers = {'ftype1': 'bla'} + ds_id1 = make_dataid(name="ch01", wavelength=(0.5, 0.6, 0.7), + calibration="reflectance", modifiers=()) + self.reader.file_handlers = {"ftype1": "bla"} - self.assertEqual(self.reader._get_file_handlers(ds_id1), 'bla') + assert self.reader._get_file_handlers(ds_id1) == "bla" - lons = make_dataid(name='lons', modifiers=()) - self.assertEqual(self.reader._get_file_handlers(lons), None) + lons = make_dataid(name="lons", modifiers=()) + assert self.reader._get_file_handlers(lons) is None - @patch('satpy.readers.yaml_reader.xr') + @patch("satpy.readers.yaml_reader.xr") def test_load_entire_dataset(self, xarray): """Check loading an entire dataset.""" file_handlers = [FakeFH(None, None), FakeFH(None, None), @@ -564,7 +551,7 @@ def test_load_entire_dataset(self, xarray): proj = self.reader._load_dataset(None, {}, file_handlers) - self.assertIs(proj, xarray.concat.return_value) + assert proj is xarray.concat.return_value class TestFileYAMLReaderLoading(unittest.TestCase): @@ -572,69 +559,69 @@ class TestFileYAMLReaderLoading(unittest.TestCase): def setUp(self): """Prepare a reader instance with a fake config.""" - patterns = ['a{something:3s}.bla'] - res_dict = {'reader': {'name': 'fake', - 'sensors': ['canon']}, - 'file_types': {'ftype1': {'name': 'ft1', - 'file_reader': BaseFileHandler, - 'file_patterns': patterns}}, - 'datasets': {'ch1': {'name': 'ch01', - 'wavelength': [0.5, 0.6, 0.7], - 'calibration': 'reflectance', - 'file_type': 'ftype1'}, + patterns = ["a{something:3s}.bla"] + res_dict = 
{"reader": {"name": "fake", + "sensors": ["canon"]}, + "file_types": {"ftype1": {"name": "ft1", + "file_reader": BaseFileHandler, + "file_patterns": patterns}}, + "datasets": {"ch1": {"name": "ch01", + "wavelength": [0.5, 0.6, 0.7], + "calibration": "reflectance", + "file_type": "ftype1"}, }} self.config = res_dict self.reader = yr.FileYAMLReader(res_dict, filter_parameters={ - 'start_time': datetime(2000, 1, 1), - 'end_time': datetime(2000, 1, 2), + "start_time": datetime(2000, 1, 1), + "end_time": datetime(2000, 1, 2), }) fake_fh = FakeFH(None, None) self.lons = xr.DataArray(np.ones((2, 2)) * 2, - dims=['y', 'x'], - attrs={'standard_name': 'longitude', - 'name': 'longitude'}) + dims=["y", "x"], + attrs={"standard_name": "longitude", + "name": "longitude"}) self.lats = xr.DataArray(np.ones((2, 2)) * 2, - dims=['y', 'x'], - attrs={'standard_name': 'latitude', - 'name': 'latitude'}) + dims=["y", "x"], + attrs={"standard_name": "latitude", + "name": "latitude"}) self.data = None def _assign_array(dsid, *_args, **_kwargs): - if dsid['name'] == 'longitude': + if dsid["name"] == "longitude": return self.lons - if dsid['name'] == 'latitude': + if dsid["name"] == "latitude": return self.lats return self.data fake_fh.get_dataset.side_effect = _assign_array - self.reader.file_handlers = {'ftype1': [fake_fh]} + self.reader.file_handlers = {"ftype1": [fake_fh]} def test_load_dataset_with_builtin_coords(self): """Test loading a dataset with builtin coordinates.""" self.data = xr.DataArray(np.ones((2, 2)), - coords={'longitude': self.lons, - 'latitude': self.lats}, - dims=['y', 'x']) + coords={"longitude": self.lons, + "latitude": self.lats}, + dims=["y", "x"]) self._check_area_for_ch01() def test_load_dataset_with_builtin_coords_in_wrong_order(self): """Test loading a dataset with builtin coordinates in the wrong order.""" self.data = xr.DataArray(np.ones((2, 2)), - coords={'latitude': self.lats, - 'longitude': self.lons}, - dims=['y', 'x']) + coords={"latitude": self.lats, + "longitude": self.lons}, + dims=["y", "x"]) self._check_area_for_ch01() def _check_area_for_ch01(self): - res = self.reader.load(['ch01']) - assert 'area' in res['ch01'].attrs - np.testing.assert_array_equal(res['ch01'].attrs['area'].lons, self.lons) - np.testing.assert_array_equal(res['ch01'].attrs['area'].lats, self.lats) - assert res['ch01'].attrs.get("reader") == "fake" + res = self.reader.load(["ch01"]) + assert "area" in res["ch01"].attrs + np.testing.assert_array_equal(res["ch01"].attrs["area"].lons, self.lons) + np.testing.assert_array_equal(res["ch01"].attrs["area"].lats, self.lats) + assert res["ch01"].attrs.get("reader") == "fake" class TestFileFileYAMLReaderMultipleFileTypes(unittest.TestCase): @@ -648,36 +635,36 @@ def setUp(self): # # For test completeness add one channel (ch3) which is only available # in ftype1. 
- patterns1 = ['a.nc'] - patterns2 = ['b.nc'] - patterns3 = ['geo.nc'] - res_dict = {'reader': {'name': 'fake', - 'sensors': ['canon']}, - 'file_types': {'ftype1': {'name': 'ft1', - 'file_patterns': patterns1}, - 'ftype2': {'name': 'ft2', - 'file_patterns': patterns2}, - 'ftype3': {'name': 'ft3', - 'file_patterns': patterns3}}, - 'datasets': {'ch1': {'name': 'ch01', - 'wavelength': [0.5, 0.6, 0.7], - 'calibration': 'reflectance', - 'file_type': ['ftype1', 'ftype2'], - 'coordinates': ['lons', 'lats']}, - 'ch2': {'name': 'ch02', - 'wavelength': [0.7, 0.75, 0.8], - 'calibration': 'counts', - 'file_type': ['ftype1', 'ftype2'], - 'coordinates': ['lons', 'lats']}, - 'ch3': {'name': 'ch03', - 'wavelength': [0.8, 0.85, 0.9], - 'calibration': 'counts', - 'file_type': 'ftype1', - 'coordinates': ['lons', 'lats']}, - 'lons': {'name': 'lons', - 'file_type': ['ftype1', 'ftype3']}, - 'lats': {'name': 'lats', - 'file_type': ['ftype1', 'ftype3']}}} + patterns1 = ["a.nc"] + patterns2 = ["b.nc"] + patterns3 = ["geo.nc"] + res_dict = {"reader": {"name": "fake", + "sensors": ["canon"]}, + "file_types": {"ftype1": {"name": "ft1", + "file_patterns": patterns1}, + "ftype2": {"name": "ft2", + "file_patterns": patterns2}, + "ftype3": {"name": "ft3", + "file_patterns": patterns3}}, + "datasets": {"ch1": {"name": "ch01", + "wavelength": [0.5, 0.6, 0.7], + "calibration": "reflectance", + "file_type": ["ftype1", "ftype2"], + "coordinates": ["lons", "lats"]}, + "ch2": {"name": "ch02", + "wavelength": [0.7, 0.75, 0.8], + "calibration": "counts", + "file_type": ["ftype1", "ftype2"], + "coordinates": ["lons", "lats"]}, + "ch3": {"name": "ch03", + "wavelength": [0.8, 0.85, 0.9], + "calibration": "counts", + "file_type": "ftype1", + "coordinates": ["lons", "lats"]}, + "lons": {"name": "lons", + "file_type": ["ftype1", "ftype3"]}, + "lats": {"name": "lats", + "file_type": ["ftype1", "ftype3"]}}} self.config = res_dict self.reader = yr.FileYAMLReader(self.config) @@ -687,13 +674,13 @@ def test_update_ds_ids_from_file_handlers(self): from functools import partial orig_ids = self.reader.all_ids - for ftype, resol in zip(('ftype1', 'ftype2'), (1, 2)): + for ftype, resol in zip(("ftype1", "ftype2"), (1, 2)): # need to copy this because the dataset infos will be modified _orig_ids = {key: val.copy() for key, val in orig_ids.items()} with patch.dict(self.reader.all_ids, _orig_ids, clear=True), \ patch.dict(self.reader.available_ids, {}, clear=True): # Add a file handler with resolution property - fh = MagicMock(filetype_info={'file_type': ftype}, + fh = MagicMock(filetype_info={"file_type": ftype}, resolution=resol) fh.available_datasets = partial(available_datasets, fh) fh.file_type_matches = partial(file_type_matches, fh) @@ -707,11 +694,11 @@ def test_update_ds_ids_from_file_handlers(self): # Make sure the resolution property has been transferred # correctly from the file handler to the dataset ID for ds_id, ds_info in self.reader.all_ids.items(): - file_types = ds_info['file_type'] + file_types = ds_info["file_type"] if not isinstance(file_types, list): file_types = [file_types] if ftype in file_types: - self.assertEqual(resol, ds_id['resolution']) + assert resol == ds_id["resolution"] # Test methods @@ -725,10 +712,10 @@ def available_datasets(self, configured_datasets=None): if is_avail is not None: yield is_avail, ds_info - matches = self.file_type_matches(ds_info['file_type']) - if matches and ds_info.get('resolution') != res: + matches = self.file_type_matches(ds_info["file_type"]) + if matches and ds_info.get("resolution") 
!= res: new_info = ds_info.copy() - new_info['resolution'] = res + new_info["resolution"] = res yield True, new_info elif is_avail is None: yield is_avail, ds_info @@ -736,9 +723,9 @@ def available_datasets(self, configured_datasets=None): def file_type_matches(self, ds_ftype): """Fake file_type_matches for testing multiple file types.""" - if isinstance(ds_ftype, str) and ds_ftype == self.filetype_info['file_type']: + if isinstance(ds_ftype, str) and ds_ftype == self.filetype_info["file_type"]: return True - if self.filetype_info['file_type'] in ds_ftype: + if self.filetype_info["file_type"] in ds_ftype: return True return None @@ -764,94 +751,94 @@ def test_load_dataset_with_area_for_single_areas(self, ldwa): original_array = np.arange(6).reshape((2, 3)) area_def = AreaDefinition( - 'test', - 'test', - 'test', - {'proj': 'geos', - 'h': 35785831, - 'type': 'crs'}, + "test", + "test", + "test", + {"proj": "geos", + "h": 35785831, + "type": "crs"}, 3, 2, original_area_extent, ) dummy_ds_xr = xr.DataArray(original_array, - coords={'y': np.arange(2), - 'x': np.arange(3), - 'time': ("y", np.arange(2))}, - attrs={'area': area_def}, - dims=('y', 'x')) + coords={"y": np.arange(2), + "x": np.arange(3), + "time": ("y", np.arange(2))}, + attrs={"area": area_def}, + dims=("y", "x")) # assign the dummy xr as return for the super _load_dataset_with_area method ldwa.return_value = dummy_ds_xr # check no input, nothing should change res = reader._load_dataset_with_area(dsid, coords) np.testing.assert_equal(res.values, original_array) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) - np.testing.assert_equal(res.coords['y'], np.arange(2)) - np.testing.assert_equal(res.coords['x'], np.arange(3)) - np.testing.assert_equal(res.coords['time'], np.arange(2)) + np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) + np.testing.assert_equal(res.coords["y"], np.arange(2)) + np.testing.assert_equal(res.coords["x"], np.arange(3)) + np.testing.assert_equal(res.coords["time"], np.arange(2)) # check wrong input with self.assertRaises(ValueError): - _ = reader._load_dataset_with_area(dsid, coords, 'wronginput') + _ = reader._load_dataset_with_area(dsid, coords, "wronginput") # check native orientation, nothing should change - res = reader._load_dataset_with_area(dsid, coords, 'native') + res = reader._load_dataset_with_area(dsid, coords, "native") np.testing.assert_equal(res.values, original_array) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) - np.testing.assert_equal(res.coords['y'], np.arange(2)) - np.testing.assert_equal(res.coords['x'], np.arange(3)) - np.testing.assert_equal(res.coords['time'], np.arange(2)) + np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) + np.testing.assert_equal(res.coords["y"], np.arange(2)) + np.testing.assert_equal(res.coords["x"], np.arange(3)) + np.testing.assert_equal(res.coords["time"], np.arange(2)) # check upright orientation, nothing should change since area is already upright - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) - np.testing.assert_equal(res.coords['y'], np.arange(2)) - np.testing.assert_equal(res.coords['x'], np.arange(3)) - np.testing.assert_equal(res.coords['time'], np.arange(2)) + np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) 
+ np.testing.assert_equal(res.coords["y"], np.arange(2)) + np.testing.assert_equal(res.coords["x"], np.arange(3)) + np.testing.assert_equal(res.coords["time"], np.arange(2)) # check that left-right image is flipped correctly - dummy_ds_xr.attrs['area'] = area_def.copy(area_extent=(1500, -1000, -1500, 1000)) + dummy_ds_xr.attrs["area"] = area_def.copy(area_extent=(1500, -1000, -1500, 1000)) ldwa.return_value = dummy_ds_xr.copy() - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.fliplr(original_array)) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) - np.testing.assert_equal(res.coords['y'], np.arange(2)) - np.testing.assert_equal(res.coords['x'], np.flip(np.arange(3))) - np.testing.assert_equal(res.coords['time'], np.arange(2)) + np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) + np.testing.assert_equal(res.coords["y"], np.arange(2)) + np.testing.assert_equal(res.coords["x"], np.flip(np.arange(3))) + np.testing.assert_equal(res.coords["time"], np.arange(2)) # check that upside down image is flipped correctly - dummy_ds_xr.attrs['area'] = area_def.copy(area_extent=(-1500, 1000, 1500, -1000)) + dummy_ds_xr.attrs["area"] = area_def.copy(area_extent=(-1500, 1000, 1500, -1000)) ldwa.return_value = dummy_ds_xr.copy() - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.flipud(original_array)) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) - np.testing.assert_equal(res.coords['y'], np.flip(np.arange(2))) - np.testing.assert_equal(res.coords['x'], np.arange(3)) - np.testing.assert_equal(res.coords['time'], np.flip(np.arange(2))) + np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) + np.testing.assert_equal(res.coords["y"], np.flip(np.arange(2))) + np.testing.assert_equal(res.coords["x"], np.arange(3)) + np.testing.assert_equal(res.coords["time"], np.flip(np.arange(2))) # check different projection than geos, nothing should be changed area_def = AreaDefinition( - 'test', - 'test', - 'test', - {'proj': 'lcc', - 'lat_1': 25.0, - 'type': 'crs'}, + "test", + "test", + "test", + {"proj": "lcc", + "lat_1": 25.0, + "type": "crs"}, 3, 2, original_area_extent, ) dummy_ds_xr = xr.DataArray(original_array, - dims=('y', 'x'), - attrs={'area': area_def}) + dims=("y", "x"), + attrs={"area": area_def}) ldwa.return_value = dummy_ds_xr - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) - np.testing.assert_equal(res.attrs['area'].area_extent, original_area_extent) + np.testing.assert_equal(res.attrs["area"].area_extent, original_area_extent) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch.object(yr.FileYAMLReader, "_load_dataset_with_area") @@ -871,12 +858,12 @@ def test_load_dataset_with_area_for_stacked_areas(self, ldwa): original_array = np.arange(12).reshape((4, 3)) area_def0 = AreaDefinition( - 'test', - 'test', - 'test', - {'proj': 'geos', - 'h': 35785831, - 'type': 'crs'}, + "test", + "test", + "test", + {"proj": "geos", + "h": 35785831, + "type": "crs"}, 3, 2, original_area_extents[0], @@ -884,36 +871,36 @@ def test_load_dataset_with_area_for_stacked_areas(self, ldwa): area_def1 = area_def0.copy(area_extent=original_area_extents[1]) dummy_ds_xr = 
xr.DataArray(original_array, - dims=('y', 'x'), - coords={'y': np.arange(4), - 'x': np.arange(3), - 'time': ("y", np.arange(4))}, - attrs={'area': StackedAreaDefinition(area_def0, area_def1)}) + dims=("y", "x"), + coords={"y": np.arange(4), + "x": np.arange(3), + "time": ("y", np.arange(4))}, + attrs={"area": StackedAreaDefinition(area_def0, area_def1)}) # check that left-right image is flipped correctly - dummy_ds_xr.attrs['area'].defs[0] = area_def0.copy(area_extent=(1500, -1000, -1500, 1000)) - dummy_ds_xr.attrs['area'].defs[1] = area_def1.copy(area_extent=(7000, 5000, 3000, 8000)) + dummy_ds_xr.attrs["area"].defs[0] = area_def0.copy(area_extent=(1500, -1000, -1500, 1000)) + dummy_ds_xr.attrs["area"].defs[1] = area_def1.copy(area_extent=(7000, 5000, 3000, 8000)) ldwa.return_value = dummy_ds_xr.copy() - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.fliplr(original_array)) - np.testing.assert_equal(res.attrs['area'].defs[0].area_extent, original_area_extents[0]) - np.testing.assert_equal(res.attrs['area'].defs[1].area_extent, original_area_extents[1]) - np.testing.assert_equal(res.coords['y'], np.arange(4)) - np.testing.assert_equal(res.coords['x'], np.flip(np.arange(3))) - np.testing.assert_equal(res.coords['time'], np.arange(4)) + np.testing.assert_equal(res.attrs["area"].defs[0].area_extent, original_area_extents[0]) + np.testing.assert_equal(res.attrs["area"].defs[1].area_extent, original_area_extents[1]) + np.testing.assert_equal(res.coords["y"], np.arange(4)) + np.testing.assert_equal(res.coords["x"], np.flip(np.arange(3))) + np.testing.assert_equal(res.coords["time"], np.arange(4)) # check that upside down image is flipped correctly - dummy_ds_xr.attrs['area'].defs[0] = area_def0.copy(area_extent=(-1500, 1000, 1500, -1000)) - dummy_ds_xr.attrs['area'].defs[1] = area_def1.copy(area_extent=(3000, 8000, 7000, 5000)) + dummy_ds_xr.attrs["area"].defs[0] = area_def0.copy(area_extent=(-1500, 1000, 1500, -1000)) + dummy_ds_xr.attrs["area"].defs[1] = area_def1.copy(area_extent=(3000, 8000, 7000, 5000)) ldwa.return_value = dummy_ds_xr.copy() - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, np.flipud(original_array)) # note that the order of the stacked areadefs is flipped here, as expected - np.testing.assert_equal(res.attrs['area'].defs[1].area_extent, original_area_extents[0]) - np.testing.assert_equal(res.attrs['area'].defs[0].area_extent, original_area_extents[1]) - np.testing.assert_equal(res.coords['y'], np.flip(np.arange(4))) - np.testing.assert_equal(res.coords['x'], np.arange(3)) - np.testing.assert_equal(res.coords['time'], np.flip(np.arange(4))) + np.testing.assert_equal(res.attrs["area"].defs[1].area_extent, original_area_extents[0]) + np.testing.assert_equal(res.attrs["area"].defs[0].area_extent, original_area_extents[1]) + np.testing.assert_equal(res.coords["y"], np.flip(np.arange(4))) + np.testing.assert_equal(res.coords["x"], np.arange(3)) + np.testing.assert_equal(res.coords["time"], np.flip(np.arange(4))) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch.object(yr.FileYAMLReader, "_load_dataset_with_area") @@ -936,15 +923,15 @@ def test_load_dataset_with_area_for_swath_def_data(self, ldwa): swath_def = SwathDefinition(lons, lats) dummy_ds_xr = xr.DataArray(original_array, - coords={'y': dim}, - attrs={'area': swath_def}, - dims=('y',)) + 
coords={"y": dim}, + attrs={"area": swath_def}, + dims=("y",)) # assign the dummy xr as return for the super _load_dataset_with_area method ldwa.return_value = dummy_ds_xr # returned dataset should be unchanged since datasets with a swath definition are not flippable - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @@ -963,21 +950,21 @@ def test_load_dataset_with_area_for_data_without_area(self, ldwa): dim = np.arange(3) dummy_ds_xr = xr.DataArray(original_array, - coords={'y': dim}, + coords={"y": dim}, attrs={}, - dims=('y',)) + dims=("y",)) # assign the dummy xr as return for the super _load_dataset_with_area method ldwa.return_value = dummy_ds_xr # returned dataset should be unchanged since datasets without area information are not flippable - res = reader._load_dataset_with_area(dsid, coords, 'NE') + res = reader._load_dataset_with_area(dsid, coords, "NE") np.testing.assert_equal(res.values, original_array) def _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info): seg_area = MagicMock() - seg_area.crs = 'some_crs' + seg_area.crs = "some_crs" seg_area.area_extent = aex seg_area.shape = ashape get_area_def = MagicMock() @@ -987,9 +974,9 @@ def _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_p get_segment_position_info.return_value = chk_pos_info fh = MagicMock() - filetype_info = {'expected_segments': expected_segments, - 'file_type': 'filetype1'} - filename_info = {'segment': segment} + filetype_info = {"expected_segments": expected_segments, + "file_type": "filetype1"} + filename_info = {"segment": segment} fh.filetype_info = filetype_info fh.filename_info = filename_info fh.get_area_def = get_area_def @@ -1011,44 +998,44 @@ def test_get_expected_segments(self, cfh): fake_fh = MagicMock() fake_fh.filename_info = {} fake_fh.filetype_info = {} - cfh.return_value = {'ft1': [fake_fh]} + cfh.return_value = {"ft1": [fake_fh]} # default (1) - created_fhs = reader.create_filehandlers(['fake.nc']) - es = created_fhs['ft1'][0].filetype_info['expected_segments'] - self.assertEqual(es, 1) + created_fhs = reader.create_filehandlers(["fake.nc"]) + es = created_fhs["ft1"][0].filetype_info["expected_segments"] + assert es == 1 # YAML defined for each file type - fake_fh.filetype_info['expected_segments'] = 2 - created_fhs = reader.create_filehandlers(['fake.nc']) - es = created_fhs['ft1'][0].filetype_info['expected_segments'] - self.assertEqual(es, 2) + fake_fh.filetype_info["expected_segments"] = 2 + created_fhs = reader.create_filehandlers(["fake.nc"]) + es = created_fhs["ft1"][0].filetype_info["expected_segments"] + assert es == 2 # defined both in the filename and the YAML metadata # YAML has priority - fake_fh.filename_info = {'total_segments': 3} - fake_fh.filetype_info = {'expected_segments': 2} - created_fhs = reader.create_filehandlers(['fake.nc']) - es = created_fhs['ft1'][0].filetype_info['expected_segments'] - self.assertEqual(es, 2) + fake_fh.filename_info = {"total_segments": 3} + fake_fh.filetype_info = {"expected_segments": 2} + created_fhs = reader.create_filehandlers(["fake.nc"]) + es = created_fhs["ft1"][0].filetype_info["expected_segments"] + assert es == 2 # defined in the filename - fake_fh.filename_info = {'total_segments': 3} + fake_fh.filename_info = {"total_segments": 3} fake_fh.filetype_info = {} - created_fhs = 
reader.create_filehandlers(['fake.nc']) - es = created_fhs['ft1'][0].filetype_info['expected_segments'] - self.assertEqual(es, 3) + created_fhs = reader.create_filehandlers(["fake.nc"]) + es = created_fhs["ft1"][0].filetype_info["expected_segments"] + assert es == 3 # check correct FCI segment (aka chunk in the FCI world) number reading into segment - fake_fh.filename_info = {'count_in_repeat_cycle': 5} - created_fhs = reader.create_filehandlers(['fake.nc']) - es = created_fhs['ft1'][0].filename_info['segment'] - self.assertEqual(es, 5) + fake_fh.filename_info = {"count_in_repeat_cycle": 5} + created_fhs = reader.create_filehandlers(["fake.nc"]) + es = created_fhs["ft1"][0].filename_info["segment"] + assert es == 5 @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) - @patch('satpy.readers.yaml_reader.FileYAMLReader._load_dataset') - @patch('satpy.readers.yaml_reader.xr') - @patch('satpy.readers.yaml_reader._find_missing_segments') + @patch("satpy.readers.yaml_reader.FileYAMLReader._load_dataset") + @patch("satpy.readers.yaml_reader.xr") + @patch("satpy.readers.yaml_reader._find_missing_segments") def test_load_dataset(self, mss, xr, parent_load_dataset): """Test _load_dataset().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader @@ -1066,7 +1053,7 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): # Setup input, and output of mocked functions counter = 9 expected_segments = 8 - seg = MagicMock(dims=['y', 'x']) + seg = MagicMock(dims=["y", "x"]) slice_list = expected_segments * [seg, ] failure = False projectable = MagicMock() @@ -1082,8 +1069,8 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): # No missing segments res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(res.attrs is file_handlers[0].combine_info.return_value) - self.assertTrue(empty_segment not in slice_list) + assert res.attrs is file_handlers[0].combine_info.return_value + assert empty_segment not in slice_list # One missing segment in the middle slice_list[4] = None @@ -1091,7 +1078,7 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(slice_list[4] is empty_segment) + assert slice_list[4] is empty_segment # The last segment is missing slice_list = expected_segments * [seg, ] @@ -1100,7 +1087,7 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(slice_list[-1] is empty_segment) + assert slice_list[-1] is empty_segment # The last two segments are missing slice_list = expected_segments * [seg, ] @@ -1109,8 +1096,8 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(slice_list[-1] is empty_segment) - self.assertTrue(slice_list[-2] is empty_segment) + assert slice_list[-1] is empty_segment + assert slice_list[-2] is empty_segment # The first segment is missing slice_list = expected_segments * [seg, ] @@ -1119,7 +1106,7 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(slice_list[0] is 
empty_segment) + assert slice_list[0] is empty_segment # The first two segments are missing slice_list = expected_segments * [seg, ] @@ -1129,8 +1116,8 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): mss.return_value = (counter, expected_segments, slice_list, failure, projectable) res = reader._load_dataset(dataid, ds_info, file_handlers) - self.assertTrue(slice_list[0] is empty_segment) - self.assertTrue(slice_list[1] is empty_segment) + assert slice_list[0] is empty_segment + assert slice_list[1] is empty_segment # Disable padding res = reader._load_dataset(dataid, ds_info, file_handlers, @@ -1139,10 +1126,10 @@ def test_load_dataset(self, mss, xr, parent_load_dataset): file_handlers) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) - @patch('satpy.readers.yaml_reader._load_area_def') - @patch('satpy.readers.yaml_reader._stack_area_defs') - @patch('satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_earlier_segments_area') - @patch('satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_later_segments_area') + @patch("satpy.readers.yaml_reader._load_area_def") + @patch("satpy.readers.yaml_reader._stack_area_defs") + @patch("satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_earlier_segments_area") + @patch("satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_later_segments_area") def test_load_area_def(self, pesa, plsa, sad, parent_load_area_def): """Test _load_area_def().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader @@ -1160,7 +1147,7 @@ def test_load_area_def(self, pesa, plsa, sad, parent_load_area_def): parent_load_area_def.assert_called_once_with(dataid, file_handlers) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) - @patch('satpy.readers.yaml_reader.AreaDefinition') + @patch("satpy.readers.yaml_reader.AreaDefinition") def test_pad_later_segments_area(self, AreaDefinition): """Test _pad_later_segments_area().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader @@ -1172,16 +1159,16 @@ def test_pad_later_segments_area(self, AreaDefinition): ashape = [200, 500] fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, None) file_handlers = [fh_1] - dataid = 'dataid' + dataid = "dataid" res = reader._pad_later_segments_area(file_handlers, dataid) - self.assertEqual(len(res), 2) + assert len(res) == 2 seg2_extent = (0, 1500, 200, 1000) - expected_call = ('fill', 'fill', 'fill', 'some_crs', 500, 200, + expected_call = ("fill", "fill", "fill", "some_crs", 500, 200, seg2_extent) AreaDefinition.assert_called_once_with(*expected_call) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) - @patch('satpy.readers.yaml_reader.AreaDefinition') + @patch("satpy.readers.yaml_reader.AreaDefinition") def test_pad_earlier_segments_area(self, AreaDefinition): """Test _pad_earlier_segments_area().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader @@ -1194,12 +1181,12 @@ def test_pad_earlier_segments_area(self, AreaDefinition): fh_2, seg2_area = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, None) file_handlers = [fh_2] - dataid = 'dataid' + dataid = "dataid" area_defs = {2: seg2_area} res = reader._pad_earlier_segments_area(file_handlers, dataid, area_defs) - self.assertEqual(len(res), 2) + assert len(res) == 2 seg1_extent = (0, 500, 200, 0) - expected_call = ('fill', 'fill', 'fill', 'some_crs', 500, 200, + expected_call = ("fill", "fill", "fill", "some_crs", 500, 200, seg1_extent) AreaDefinition.assert_called_once_with(*expected_call) @@ -1208,46 +1195,46 @@ def 
test_find_missing_segments(self): from satpy.readers.yaml_reader import _find_missing_segments as fms # Dataset with only one segment - filename_info = {'segment': 1} + filename_info = {"segment": 1} fh_seg1 = MagicMock(filename_info=filename_info) - projectable = 'projectable' + projectable = "projectable" get_dataset = MagicMock() get_dataset.return_value = projectable fh_seg1.get_dataset = get_dataset file_handlers = [fh_seg1] - ds_info = {'file_type': []} - dataid = 'dataid' + ds_info = {"file_type": []} + dataid = "dataid" res = fms(file_handlers, ds_info, dataid) counter, expected_segments, slice_list, failure, proj = res - self.assertEqual(counter, 2) - self.assertEqual(expected_segments, 1) - self.assertTrue(projectable in slice_list) - self.assertFalse(failure) - self.assertTrue(proj is projectable) + assert counter == 2 + assert expected_segments == 1 + assert projectable in slice_list + assert failure is False + assert proj is projectable # Three expected segments, first and last missing - filename_info = {'segment': 2} - filetype_info = {'expected_segments': 3, - 'file_type': 'foo'} + filename_info = {"segment": 2} + filetype_info = {"expected_segments": 3, + "file_type": "foo"} fh_seg2 = MagicMock(filename_info=filename_info, filetype_info=filetype_info) - projectable = 'projectable' + projectable = "projectable" get_dataset = MagicMock() get_dataset.return_value = projectable fh_seg2.get_dataset = get_dataset file_handlers = [fh_seg2] - ds_info = {'file_type': ['foo']} - dataid = 'dataid' + ds_info = {"file_type": ["foo"]} + dataid = "dataid" res = fms(file_handlers, ds_info, dataid) counter, expected_segments, slice_list, failure, proj = res - self.assertEqual(counter, 3) - self.assertEqual(expected_segments, 3) - self.assertEqual(slice_list, [None, projectable, None]) - self.assertFalse(failure) - self.assertTrue(proj is projectable) + assert counter == 3 + assert expected_segments == 3 + assert slice_list == [None, projectable, None] + assert failure is False + assert proj is projectable -@pytest.fixture +@pytest.fixture() @patch.object(yr.GEOVariableSegmentYAMLReader, "__init__", lambda x: None) def GVSYReader(): """Get a fixture of the GEOVariableSegmentYAMLReader.""" @@ -1258,31 +1245,31 @@ def GVSYReader(): return reader -@pytest.fixture +@pytest.fixture() def fake_geswh(): """Get a fixture of the patched _get_empty_segment_with_height.""" - with patch('satpy.readers.yaml_reader._get_empty_segment_with_height') as geswh: + with patch("satpy.readers.yaml_reader._get_empty_segment_with_height") as geswh: yield geswh -@pytest.fixture +@pytest.fixture() def fake_xr(): """Get a fixture of the patched xarray.""" - with patch('satpy.readers.yaml_reader.xr') as xr: + with patch("satpy.readers.yaml_reader.xr") as xr: yield xr -@pytest.fixture +@pytest.fixture() def fake_mss(): """Get a fixture of the patched _find_missing_segments.""" - with patch('satpy.readers.yaml_reader._find_missing_segments') as mss: + with patch("satpy.readers.yaml_reader._find_missing_segments") as mss: yield mss -@pytest.fixture +@pytest.fixture() def fake_adef(): """Get a fixture of the patched AreaDefinition.""" - with patch('satpy.readers.yaml_reader.AreaDefinition') as adef: + with patch("satpy.readers.yaml_reader.AreaDefinition") as adef: yield adef @@ -1293,14 +1280,14 @@ def test_get_empty_segment(self, GVSYReader, fake_mss, fake_xr, fake_geswh): """Test execution of (overridden) get_empty_segment inside _load_dataset.""" # Setup input, and output of mocked functions for first segment missing 
chk_pos_info = { - '1km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 11136}, - '2km': {'start_position_row': 140, - 'end_position_row': None, - 'segment_height': 278, - 'grid_width': 5568} + "1km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 11136}, + "2km": {"start_position_row": 140, + "end_position_row": None, + "segment_height": 278, + "grid_width": 5568} } expected_segments = 2 segment = 2 @@ -1308,10 +1295,10 @@ def test_get_empty_segment(self, GVSYReader, fake_mss, fake_xr, fake_geswh): ashape = [278, 5568] fh_2, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) - GVSYReader.file_handlers = {'filetype1': [fh_2]} + GVSYReader.file_handlers = {"filetype1": [fh_2]} counter = 2 - seg = MagicMock(dims=['y', 'x']) + seg = MagicMock(dims=["y", "x"]) slice_list = [None, seg] failure = False projectable = MagicMock() @@ -1325,20 +1312,20 @@ def test_get_empty_segment(self, GVSYReader, fake_mss, fake_xr, fake_geswh): failure, projectable) GVSYReader._load_dataset(dataid, ds_info, [fh_2]) # the return of get_empty_segment - fake_geswh.assert_called_once_with(empty_segment, 139, dim='y') + fake_geswh.assert_called_once_with(empty_segment, 139, dim="y") def test_pad_earlier_segments_area(self, GVSYReader, fake_adef): """Test _pad_earlier_segments_area() for the variable segment case.""" # setting to 0 or None values that shouldn't be relevant chk_pos_info = { - '1km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 11136}, - '2km': {'start_position_row': 140, - 'end_position_row': None, - 'segment_height': 278, - 'grid_width': 5568} + "1km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 11136}, + "2km": {"start_position_row": 140, + "end_position_row": None, + "segment_height": 278, + "grid_width": 5568} } expected_segments = 2 segment = 2 @@ -1346,8 +1333,8 @@ def test_pad_earlier_segments_area(self, GVSYReader, fake_adef): ashape = [278, 5568] fh_2, seg2_area = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) - GVSYReader.file_handlers = {'filetype1': [fh_2]} - dataid = 'dataid' + GVSYReader.file_handlers = {"filetype1": [fh_2]} + dataid = "dataid" area_defs = {2: seg2_area} res = GVSYReader._pad_earlier_segments_area([fh_2], dataid, area_defs) assert len(res) == 2 @@ -1358,29 +1345,29 @@ def test_pad_earlier_segments_area(self, GVSYReader, fake_adef): # half of the previous size (1000-500)/2=250. 
# The new area extent lower-left row is therefore 500-250=250 seg1_extent = (0, 500, 200, 250) - expected_call = ('fill', 'fill', 'fill', 'some_crs', 5568, 139, + expected_call = ("fill", "fill", "fill", "some_crs", 5568, 139, seg1_extent) fake_adef.assert_called_once_with(*expected_call) def test_pad_later_segments_area(self, GVSYReader, fake_adef): """Test _pad_later_segments_area() in the variable padding case.""" chk_pos_info = { - '1km': {'start_position_row': None, - 'end_position_row': 11136 - 278, - 'segment_height': 556, - 'grid_width': 11136}, - '2km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 5568}} + "1km": {"start_position_row": None, + "end_position_row": 11136 - 278, + "segment_height": 556, + "grid_width": 11136}, + "2km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 5568}} expected_segments = 2 segment = 1 aex = [0, 1000, 200, 500] ashape = [556, 11136] fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) - GVSYReader.file_handlers = {'filetype1': [fh_1]} - dataid = 'dataid' + GVSYReader.file_handlers = {"filetype1": [fh_1]} + dataid = "dataid" res = GVSYReader._pad_later_segments_area([fh_1], dataid) assert len(res) == 2 @@ -1389,7 +1376,7 @@ def test_pad_later_segments_area(self, GVSYReader, fake_adef): # therefore, the new vertical area extent should be half of the previous size (1000-500)/2=250. # The new area extent lower-left row is therefore 1000+250=1250 seg2_extent = (0, 1250, 200, 1000) - expected_call = ('fill', 'fill', 'fill', 'some_crs', 11136, 278, + expected_call = ("fill", "fill", "fill", "some_crs", 11136, 278, seg2_extent) fake_adef.assert_called_once_with(*expected_call) @@ -1406,45 +1393,45 @@ def side_effect_areadef(a, b, c, crs, width, height, aex): fake_adef.side_effect = side_effect_areadef chk_pos_info = { - '1km': {'start_position_row': 11136 - 600 - 100 + 1, - 'end_position_row': 11136 - 600, - 'segment_height': 100, - 'grid_width': 11136}, - '2km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 5568}} + "1km": {"start_position_row": 11136 - 600 - 100 + 1, + "end_position_row": 11136 - 600, + "segment_height": 100, + "grid_width": 11136}, + "2km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 5568}} expected_segments = 8 segment = 1 aex = [0, 1000, 200, 500] ashape = [100, 11136] fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) chk_pos_info = { - '1km': {'start_position_row': 11136 - 300 - 100 + 1, - 'end_position_row': 11136 - 300, - 'segment_height': 100, - 'grid_width': 11136}, - '2km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 5568}} + "1km": {"start_position_row": 11136 - 300 - 100 + 1, + "end_position_row": 11136 - 300, + "segment_height": 100, + "grid_width": 11136}, + "2km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 5568}} segment = 4 fh_4, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) chk_pos_info = { - '1km': {'start_position_row': 11136 - 100 + 1, - 'end_position_row': None, - 'segment_height': 100, - 'grid_width': 11136}, - '2km': {'start_position_row': 0, - 'end_position_row': 0, - 'segment_height': 0, - 'grid_width': 5568}} + "1km": {"start_position_row": 11136 - 100 + 1, + "end_position_row": None, + "segment_height": 100, + 
"grid_width": 11136}, + "2km": {"start_position_row": 0, + "end_position_row": 0, + "segment_height": 0, + "grid_width": 5568}} segment = 8 fh_8, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) - GVSYReader.file_handlers = {'filetype1': [fh_1, fh_4, fh_8]} - dataid = 'dataid' + GVSYReader.file_handlers = {"filetype1": [fh_1, fh_4, fh_8]} + dataid = "dataid" res = GVSYReader._pad_later_segments_area([fh_1, fh_4, fh_8], dataid) assert len(res) == 8 @@ -1473,15 +1460,15 @@ def side_effect_areadef(a, b, c, crs, width, height, aex): # The second padded segment has 67px height -> 500*67/100=335 area extent height ->1330+335=1665 # The first padded segment has 67px height -> 500*67/100=335 area extent height ->1665+335=2000 assert fake_adef.call_count == 5 - expected_call1 = ('fill', 'fill', 'fill', 'some_crs', 11136, 100, + expected_call1 = ("fill", "fill", "fill", "some_crs", 11136, 100, (0, 1500.0, 200, 1000)) - expected_call2 = ('fill', 'fill', 'fill', 'some_crs', 11136, 100, + expected_call2 = ("fill", "fill", "fill", "some_crs", 11136, 100, (0, 2000.0, 200, 1500)) - expected_call3 = ('fill', 'fill', 'fill', 'some_crs', 11136, 66, + expected_call3 = ("fill", "fill", "fill", "some_crs", 11136, 66, (0, 1330.0, 200, 1000)) - expected_call4 = ('fill', 'fill', 'fill', 'some_crs', 11136, 67, + expected_call4 = ("fill", "fill", "fill", "some_crs", 11136, 67, (0, 1665.0, 200, 1330.0)) - expected_call5 = ('fill', 'fill', 'fill', 'some_crs', 11136, 67, + expected_call5 = ("fill", "fill", "fill", "some_crs", 11136, 67, (0, 2000.0, 200, 1665.0)) fake_adef.side_effect = None @@ -1496,22 +1483,22 @@ def test_get_empty_segment_with_height(self): """Test _get_empty_segment_with_height().""" from satpy.readers.yaml_reader import _get_empty_segment_with_height as geswh - dim = 'y' + dim = "y" # check expansion of empty segment - empty_segment = xr.DataArray(np.ones((139, 5568)), dims=['y', 'x']) + empty_segment = xr.DataArray(np.ones((139, 5568)), dims=["y", "x"]) new_height = 140 new_empty_segment = geswh(empty_segment, new_height, dim) assert new_empty_segment.shape == (140, 5568) # check reduction of empty segment - empty_segment = xr.DataArray(np.ones((140, 5568)), dims=['y', 'x']) + empty_segment = xr.DataArray(np.ones((140, 5568)), dims=["y", "x"]) new_height = 139 new_empty_segment = geswh(empty_segment, new_height, dim) assert new_empty_segment.shape == (139, 5568) # check that empty segment is not modified if it has the right height already - empty_segment = xr.DataArray(np.ones((140, 5568)), dims=['y', 'x']) + empty_segment = xr.DataArray(np.ones((140, 5568)), dims=["y", "x"]) new_height = 140 new_empty_segment = geswh(empty_segment, new_height, dim) assert new_empty_segment is empty_segment diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index c87cd1055c..ca958fce37 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -69,7 +69,7 @@ def wrapper(self, *args, **kwargs): def convert_file_content_to_data_array(file_content, attrs=tuple(), - dims=('z', 'y', 'x')): + dims=("z", "y", "x")): """Help old reader tests that still use numpy arrays. 
A lot of old reader tests still use numpy arrays and depend on the @@ -98,8 +98,8 @@ def convert_file_content_to_data_array(file_content, attrs=tuple(), for key, val in file_content.items(): da_attrs = {} for a in attrs: - if key + '/attr/' + a in file_content: - da_attrs[a] = file_content[key + '/attr/' + a] + if key + "/attr/" + a in file_content: + da_attrs[a] = file_content[key + "/attr/" + a] if isinstance(val, np.ndarray): val = da.from_array(val, chunks=4096) @@ -120,14 +120,14 @@ def _filter_datasets(all_ds, names_or_ids): str_filter = [ds_name for ds_name in names_or_ids if isinstance(ds_name, str)] id_filter = [ds_id for ds_id in names_or_ids if not isinstance(ds_id, str)] for ds_id in all_ds: - if ds_id in id_filter or ds_id['name'] in str_filter: + if ds_id in id_filter or ds_id["name"] in str_filter: yield ds_id def _swath_def_of_data_arrays(rows, cols): return SwathDefinition( - DataArray(da.zeros((rows, cols)), dims=('y', 'x')), - DataArray(da.zeros((rows, cols)), dims=('y', 'x')), + DataArray(da.zeros((rows, cols)), dims=("y", "x")), + DataArray(da.zeros((rows, cols)), dims=("y", "x")), ) @@ -136,14 +136,14 @@ class FakeModifier(ModifierBase): def _handle_res_change(self, datasets, info): # assume this is used on the 500m version of ds5 - info['resolution'] = 250 + info["resolution"] = 250 rep_data_arr = datasets[0] - y_size = rep_data_arr.sizes['y'] - x_size = rep_data_arr.sizes['x'] + y_size = rep_data_arr.sizes["y"] + x_size = rep_data_arr.sizes["x"] data = da.zeros((y_size * 2, x_size * 2)) - if isinstance(rep_data_arr.attrs['area'], SwathDefinition): + if isinstance(rep_data_arr.attrs["area"], SwathDefinition): area = _swath_def_of_data_arrays(y_size * 2, x_size * 2) - info['area'] = area + info["area"] = area else: raise NotImplementedError("'res_change' modifier can't handle " "AreaDefinition changes yet.") @@ -151,20 +151,20 @@ def _handle_res_change(self, datasets, info): def __call__(self, datasets, optional_datasets=None, **kwargs): """Modify provided data depending on the modifier name and input data.""" - if self.attrs['optional_prerequisites']: - for opt_dep in self.attrs['optional_prerequisites']: - opt_dep_name = opt_dep if isinstance(opt_dep, str) else opt_dep.get('name', '') - if 'NOPE' in opt_dep_name or 'fail' in opt_dep_name: + if self.attrs["optional_prerequisites"]: + for opt_dep in self.attrs["optional_prerequisites"]: + opt_dep_name = opt_dep if isinstance(opt_dep, str) else opt_dep.get("name", "") + if "NOPE" in opt_dep_name or "fail" in opt_dep_name: continue - assert (optional_datasets is not None and - len(optional_datasets)) - resolution = datasets[0].attrs.get('resolution') - mod_name = self.attrs['modifiers'][-1] + assert optional_datasets is not None + assert len(optional_datasets) + resolution = datasets[0].attrs.get("resolution") + mod_name = self.attrs["modifiers"][-1] data = datasets[0].data i = datasets[0].attrs.copy() - if mod_name == 'res_change' and resolution is not None: + if mod_name == "res_change" and resolution is not None: data = self._handle_res_change(datasets, i) - elif 'incomp_areas' in mod_name: + elif "incomp_areas" in mod_name: raise IncompatibleAreas( "Test modifier 'incomp_areas' always raises IncompatibleAreas") self.apply_modifier_info(datasets[0].attrs, i) @@ -184,27 +184,27 @@ def __call__(self, projectables, nonprojectables=None, **kwargs): if nonprojectables: self.match_data_arrays(nonprojectables) info = self.attrs.copy() - if self.attrs['name'] in ('comp14', 'comp26'): + if self.attrs["name"] in ("comp14", 
"comp26"): # used as a test when composites update the dataset id with # information from prereqs - info['resolution'] = 555 - if self.attrs['name'] in ('comp24', 'comp25'): + info["resolution"] = 555 + if self.attrs["name"] in ("comp24", "comp25"): # other composites that copy the resolution from inputs - info['resolution'] = projectables[0].attrs.get('resolution') - if len(projectables) != len(self.attrs['prerequisites']): + info["resolution"] = projectables[0].attrs.get("resolution") + if len(projectables) != len(self.attrs["prerequisites"]): raise ValueError("Not enough prerequisite datasets passed") info.update(kwargs) if projectables: - info['area'] = projectables[0].attrs['area'] + info["area"] = projectables[0].attrs["area"] dim_sizes = projectables[0].sizes else: # static_image - dim_sizes = {'y': 4, 'x': 5} - return DataArray(data=da.zeros((dim_sizes['y'], dim_sizes['x'], 3)), + dim_sizes = {"y": 4, "x": 5} + return DataArray(data=da.zeros((dim_sizes["y"], dim_sizes["x"], 3)), attrs=info, - dims=['y', 'x', 'bands'], - coords={'bands': ['R', 'G', 'B']}) + dims=["y", "x", "bands"], + coords={"bands": ["R", "G", "B"]}) class FakeFileHandler(BaseFileHandler): @@ -228,21 +228,21 @@ def end_time(self): @property def sensor_names(self): """Get sensor name from filetype configuration.""" - sensor = self.filetype_info.get('sensor', 'fake_sensor') + sensor = self.filetype_info.get("sensor", "fake_sensor") return {sensor} def get_dataset(self, data_id: DataID, ds_info: dict): """Get fake DataArray for testing.""" - if data_id['name'] == 'ds9_fail_load': + if data_id["name"] == "ds9_fail_load": raise KeyError("Can't load '{}' because it is supposed to " - "fail.".format(data_id['name'])) + "fail.".format(data_id["name"])) attrs = data_id.to_dict() attrs.update(ds_info) - attrs['sensor'] = self.filetype_info.get('sensor', 'fake_sensor') - attrs['platform_name'] = 'fake_platform' - attrs['start_time'] = self.start_time - attrs['end_time'] = self.end_time - res = attrs.get('resolution', 250) + attrs["sensor"] = self.filetype_info.get("sensor", "fake_sensor") + attrs["platform_name"] = "fake_platform" + attrs["start_time"] = self.start_time + attrs["end_time"] = self.end_time + res = attrs.get("resolution", 250) rows = cols = { 250: 20, 500: 10, @@ -250,7 +250,7 @@ def get_dataset(self, data_id: DataID, ds_info: dict): }.get(res, 5) return DataArray(data=da.zeros((rows, cols)), attrs=attrs, - dims=['y', 'x']) + dims=["y", "x"]) def available_datasets(self, configured_datasets=None): """Report YAML datasets available unless 'not_available' is specified during creation.""" @@ -262,7 +262,7 @@ def available_datasets(self, configured_datasets=None): # file handler so let's yield early yield is_avail, ds_info continue - ft_matches = self.file_type_matches(ds_info['file_type']) + ft_matches = self.file_type_matches(ds_info["file_type"]) if not ft_matches: yield None, ds_info continue diff --git a/satpy/tests/writer_tests/test_awips_tiled.py b/satpy/tests/writer_tests/test_awips_tiled.py index a47552a708..eab72e8f5b 100644 --- a/satpy/tests/writer_tests/test_awips_tiled.py +++ b/satpy/tests/writer_tests/test_awips_tiled.py @@ -42,10 +42,10 @@ def _check_production_location(ds): - if 'production_site' in ds.attrs: - prod_loc_name = 'production_site' - elif 'production_location' in ds.attrs: - prod_loc_name = 'producton_location' + if "production_site" in ds.attrs: + prod_loc_name = "production_site" + elif "production_location" in ds.attrs: + prod_loc_name = "producton_location" else: return @@ -62,67 
+62,67 @@ def check_required_properties(unmasked_ds, masked_ds): def _check_required_common_attributes(ds): """Check common properties of the created AWIPS tiles for validity.""" - for attr_name in ('tile_row_offset', 'tile_column_offset', - 'product_tile_height', 'product_tile_width', - 'number_product_tiles', - 'product_rows', 'product_columns'): + for attr_name in ("tile_row_offset", "tile_column_offset", + "product_tile_height", "product_tile_width", + "number_product_tiles", + "product_rows", "product_columns"): assert attr_name in ds.attrs _check_production_location(ds) for data_arr in ds.data_vars.values(): if data_arr.ndim == 0: # grid mapping variable - assert 'grid_mapping_name' in data_arr.attrs + assert "grid_mapping_name" in data_arr.attrs continue - assert data_arr.encoding.get('zlib', False) - assert 'grid_mapping' in data_arr.attrs - assert data_arr.attrs['grid_mapping'] in ds - assert 'units' in data_arr.attrs + assert data_arr.encoding.get("zlib", False) + assert "grid_mapping" in data_arr.attrs + assert data_arr.attrs["grid_mapping"] in ds + assert "units" in data_arr.attrs if data_arr.name != "DQF": assert data_arr.dtype == np.int16 assert data_arr.attrs["_Unsigned"] == "true" def _check_scaled_x_coordinate_variable(ds, masked_ds): - assert 'x' in ds.coords - x_coord = ds.coords['x'] + assert "x" in ds.coords + x_coord = ds.coords["x"] np.testing.assert_equal(np.diff(x_coord), 1) x_attrs = x_coord.attrs - assert x_attrs.get('standard_name') == 'projection_x_coordinate' - assert x_attrs.get('units') == 'meters' - assert 'scale_factor' in x_attrs - assert x_attrs['scale_factor'] > 0 - assert 'add_offset' in x_attrs + assert x_attrs.get("standard_name") == "projection_x_coordinate" + assert x_attrs.get("units") == "meters" + assert "scale_factor" in x_attrs + assert x_attrs["scale_factor"] > 0 + assert "add_offset" in x_attrs - unscaled_x = masked_ds.coords['x'].values + unscaled_x = masked_ds.coords["x"].values assert (np.diff(unscaled_x) > 0).all() def _check_scaled_y_coordinate_variable(ds, masked_ds): - assert 'y' in ds.coords - y_coord = ds.coords['y'] + assert "y" in ds.coords + y_coord = ds.coords["y"] np.testing.assert_equal(np.diff(y_coord), 1) y_attrs = y_coord.attrs - assert y_attrs.get('standard_name') == 'projection_y_coordinate' - assert y_attrs.get('units') == 'meters' - assert 'scale_factor' in y_attrs - assert y_attrs['scale_factor'] < 0 - assert 'add_offset' in y_attrs + assert y_attrs.get("standard_name") == "projection_y_coordinate" + assert y_attrs.get("units") == "meters" + assert "scale_factor" in y_attrs + assert y_attrs["scale_factor"] < 0 + assert "add_offset" in y_attrs - unscaled_y = masked_ds.coords['y'].values + unscaled_y = masked_ds.coords["y"].values assert (np.diff(unscaled_y) < 0).all() def _get_test_area(shape=(200, 100), crs=None, extents=None): from pyresample.geometry import AreaDefinition if crs is None: - crs = CRS('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') + crs = CRS("+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs") if extents is None: extents = (-1000., -1500., 1000., 1500.) 
area_def = AreaDefinition( - 'test', - 'test', - 'test', + "test", + "test", + "test", crs, shape[1], shape[0], @@ -138,11 +138,11 @@ def _get_test_data(shape=(200, 100), chunks=50): def _get_test_lcc_data(dask_arr, area_def, extra_attrs=None): attrs = dict( - name='test_ds', - platform_name='PLAT', - sensor='SENSOR', - units='1', - standard_name='toa_bidirectional_reflectance', + name="test_ds", + platform_name="PLAT", + sensor="SENSOR", + units="1", + standard_name="toa_bidirectional_reflectance", area=area_def, start_time=START_TIME, end_time=END_TIME @@ -151,7 +151,7 @@ def _get_test_lcc_data(dask_arr, area_def, extra_attrs=None): attrs.update(extra_attrs) ds = xr.DataArray( dask_arr, - dims=('y', 'x') if dask_arr.ndim == 2 else ('bands', 'y', 'x'), + dims=("y", "x") if dask_arr.ndim == 2 else ("bands", "y", "x"), attrs=attrs, ) return update_resampled_coords(ds, ds, area_def) @@ -165,13 +165,13 @@ def test_init(self, tmp_path): from satpy.writers.awips_tiled import AWIPSTiledWriter AWIPSTiledWriter(base_dir=str(tmp_path)) - @pytest.mark.parametrize('use_save_dataset', + @pytest.mark.parametrize("use_save_dataset", [(False,), (True,)]) @pytest.mark.parametrize( - ('extra_attrs', 'expected_filename'), + ("extra_attrs", "expected_filename"), [ - ({}, 'TESTS_AII_PLAT_SENSOR_test_ds_TEST_T001_20180101_1200.nc'), - ({'sensor': 'viirs', 'name': 'I01'}, 'TESTS_AII_PLAT_viirs_I01_TEST_T001_20180101_1200.nc'), + ({}, "TESTS_AII_PLAT_SENSOR_test_ds_TEST_T001_20180101_1200.nc"), + ({"sensor": "viirs", "name": "I01"}, "TESTS_AII_PLAT_viirs_I01_TEST_T001_20180101_1200.nc"), ] ) def test_basic_numbered_1_tile(self, extra_attrs, expected_filename, use_save_dataset, caplog, tmp_path): @@ -183,21 +183,21 @@ def test_basic_numbered_1_tile(self, extra_attrs, expected_filename, use_save_da with caplog.at_level(logging.DEBUG): w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) if use_save_dataset: - w.save_dataset(input_data_arr, sector_id='TEST', source_name='TESTS') + w.save_dataset(input_data_arr, sector_id="TEST", source_name="TESTS") else: - w.save_datasets([input_data_arr], sector_id='TEST', source_name='TESTS') + w.save_datasets([input_data_arr], sector_id="TEST", source_name="TESTS") assert "no routine matching" not in caplog.text assert "Can't format string" not in caplog.text - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert len(all_files) == 1 assert os.path.basename(all_files[0]) == expected_filename for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) output_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, output_ds) - scale_factor = output_ds['data'].encoding['scale_factor'] - np.testing.assert_allclose(input_data_arr.values, output_ds['data'].data, + scale_factor = output_ds["data"].encoding["scale_factor"] + np.testing.assert_allclose(input_data_arr.values, output_ds["data"].data, atol=scale_factor / 2) def test_units_length_warning(self, tmp_path): @@ -208,8 +208,8 @@ def test_units_length_warning(self, tmp_path): input_data_arr = _get_test_lcc_data(data, area_def) input_data_arr.attrs["units"] = "this is a really long units string" w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) - with pytest.warns(UserWarning, match=r'.*this is a really long units string.*too long.*'): - w.save_dataset(input_data_arr, sector_id='TEST', source_name='TESTS') + with pytest.warns(UserWarning, match=r".*this is a really long units string.*too 
long.*"): + w.save_dataset(input_data_arr, sector_id="TEST", source_name="TESTS") @pytest.mark.parametrize( ("tile_count", "tile_size"), @@ -228,33 +228,33 @@ def test_basic_numbered_tiles(self, tile_count, tile_size, tmp_path): input_data_arr = _get_test_lcc_data(data, area_def) w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) save_kwargs = dict( - sector_id='TEST', + sector_id="TEST", source_name="TESTS", tile_count=tile_count, tile_size=tile_size, - extra_global_attrs={'my_global': 'TEST'} + extra_global_attrs={"my_global": "TEST"} ) should_error = tile_count is None and tile_size is None if should_error: with dask.config.set(scheduler=CustomScheduler(0)), \ - pytest.raises(ValueError, match=r'Either.*tile_count.*'): + pytest.raises(ValueError, match=r"Either.*tile_count.*"): w.save_datasets([input_data_arr], **save_kwargs) else: with dask.config.set(scheduler=CustomScheduler(1 * 2)): # precompute=*2 w.save_datasets([input_data_arr], **save_kwargs) - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) expected_num_files = 0 if should_error else 9 assert len(all_files) == expected_num_files for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - assert unmasked_ds.attrs['my_global'] == 'TEST' - assert unmasked_ds.attrs['sector_id'] == 'TEST' - assert 'physical_element' in unmasked_ds.attrs - stime = input_data_arr.attrs['start_time'] - assert unmasked_ds.attrs['start_date_time'] == stime.strftime('%Y-%m-%dT%H:%M:%S') + assert unmasked_ds.attrs["my_global"] == "TEST" + assert unmasked_ds.attrs["sector_id"] == "TEST" + assert "physical_element" in unmasked_ds.attrs + stime = input_data_arr.attrs["start_time"] + assert unmasked_ds.attrs["start_date_time"] == stime.strftime("%Y-%m-%dT%H:%M:%S") def test_basic_lettered_tiles(self, tmp_path): """Test creating a lettered grid.""" @@ -265,14 +265,14 @@ def test_basic_lettered_tiles(self, tmp_path): extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) # tile_count should be ignored since we specified lettered_grid - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert len(all_files) == 16 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - assert masked_ds.attrs['start_date_time'] == START_TIME.strftime('%Y-%m-%dT%H:%M:%S') + assert masked_ds.attrs["start_date_time"] == START_TIME.strftime("%Y-%m-%dT%H:%M:%S") def test_basic_lettered_tiles_diff_projection(self, tmp_path): """Test creating a lettered grid from data with differing projection..""" @@ -284,20 +284,20 @@ def test_basic_lettered_tiles_diff_projection(self, tmp_path): extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) # tile_count should be ignored since we specified lettered_grid - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) - all_files = sorted(glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc'))) + w.save_datasets([ds], sector_id="LCC", 
source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + all_files = sorted(glob(os.path.join(str(tmp_path), "TESTS_AII*.nc"))) assert len(all_files) == 24 assert "TC02" in all_files[0] # the first tile should be TC02 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - assert masked_ds.attrs['start_date_time'] == START_TIME.strftime('%Y-%m-%dT%H:%M:%S') + assert masked_ds.attrs["start_date_time"] == START_TIME.strftime("%Y-%m-%dT%H:%M:%S") def test_lettered_tiles_update_existing(self, tmp_path): """Test updating lettered tiles with additional data.""" from satpy.writers.awips_tiled import AWIPSTiledWriter - first_base_dir = os.path.join(str(tmp_path), 'first') + first_base_dir = os.path.join(str(tmp_path), "first") w = AWIPSTiledWriter(base_dir=first_base_dir, compress=True) shape = (2000, 1000) data = np.linspace(0., 1., shape[0] * shape[1], dtype=np.float32).reshape(shape) @@ -308,11 +308,11 @@ def test_lettered_tiles_update_existing(self, tmp_path): extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) # tile_count should be ignored since we specified lettered_grid - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) - all_files = sorted(glob(os.path.join(first_base_dir, 'TESTS_AII*.nc'))) + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + all_files = sorted(glob(os.path.join(first_base_dir, "TESTS_AII*.nc"))) assert len(all_files) == 16 first_files = [] - second_base_dir = os.path.join(str(tmp_path), 'second') + second_base_dir = os.path.join(str(tmp_path), "second") os.makedirs(second_base_dir) for fn in all_files: new_fn = fn.replace(first_base_dir, second_base_dir) @@ -335,23 +335,23 @@ def test_lettered_tiles_update_existing(self, tmp_path): # file multiple times...sometimes. If we limit dask to one worker # it seems to work fine. 
with dask.config.set(num_workers=1): - w.save_datasets([ds2], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) - all_files = glob(os.path.join(second_base_dir, 'TESTS_AII*.nc')) + w.save_datasets([ds2], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + all_files = glob(os.path.join(second_base_dir, "TESTS_AII*.nc")) # 16 original tiles + 4 new tiles assert len(all_files) == 20 # these tiles should be the right-most edge of the first image - first_right_edge_files = [x for x in first_files if 'P02' in x or 'P04' in x or 'V02' in x or 'V04' in x] + first_right_edge_files = [x for x in first_files if "P02" in x or "P04" in x or "V02" in x or "V04" in x] for new_file in first_right_edge_files: orig_file = new_file.replace(second_base_dir, first_base_dir) orig_nc = xr.open_dataset(orig_file) - orig_data = orig_nc['data'].values + orig_data = orig_nc["data"].values if not np.isnan(orig_data).any(): # we only care about the tiles that had NaNs originally continue new_nc = xr.open_dataset(new_file) - new_data = new_nc['data'].values + new_data = new_nc["data"].values # there should be at least some areas of the file # that old data was present and hasn't been replaced np.testing.assert_allclose(orig_data[:, :20], new_data[:, :20]) @@ -369,17 +369,17 @@ def test_lettered_tiles_sector_ref(self, tmp_path): area_def = _get_test_area(shape=(2000, 1000), extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", lettered_grid=True, use_sector_reference=True, use_end_time=True) - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert len(all_files) == 16 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - expected_start = (START_TIME + timedelta(minutes=20)).strftime('%Y-%m-%dT%H:%M:%S') - assert masked_ds.attrs['start_date_time'] == expected_start + expected_start = (START_TIME + timedelta(minutes=20)).strftime("%Y-%m-%dT%H:%M:%S") + assert masked_ds.attrs["start_date_time"] == expected_start def test_lettered_tiles_no_fit(self, tmp_path): """Test creating a lettered grid with no data overlapping the grid.""" @@ -389,9 +389,9 @@ def test_lettered_tiles_no_fit(self, tmp_path): area_def = _get_test_area(shape=(2000, 1000), extents=(4000000., 5000000., 5000000., 6000000.)) ds = _get_test_lcc_data(data, area_def) - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) # No files created - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert not all_files def test_lettered_tiles_no_valid_data(self, tmp_path): @@ -402,9 +402,9 @@ def test_lettered_tiles_no_valid_data(self, tmp_path): area_def = _get_test_area(shape=(2000, 1000), extents=(-1000000., -1500000., 1000000., 1500000.)) ds = _get_test_lcc_data(data, area_def) - w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) + w.save_datasets([ds], sector_id="LCC", source_name="TESTS", tile_count=(3, 3), lettered_grid=True) # No files created - all NaNs should result in no 
tiles being created - all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), "TESTS_AII*.nc")) assert not all_files def test_lettered_tiles_bad_filename(self, tmp_path): @@ -417,8 +417,8 @@ def test_lettered_tiles_bad_filename(self, tmp_path): ds = _get_test_lcc_data(data, area_def) with pytest.raises(KeyError): w.save_datasets([ds], - sector_id='LCC', - source_name='TESTS', + sector_id="LCC", + source_name="TESTS", tile_count=(3, 3), lettered_grid=True) @@ -429,17 +429,17 @@ def test_basic_numbered_tiles_rgb(self, tmp_path): data = da.from_array(np.linspace(0., 1., 60000, dtype=np.float32).reshape((3, 200, 100)), chunks=50) area_def = _get_test_area() ds = _get_test_lcc_data(data, area_def) - ds = ds.rename(dict((old, new) for old, new in zip(ds.dims, ['bands', 'y', 'x']))) - ds.coords['bands'] = ['R', 'G', 'B'] + ds = ds.rename(dict((old, new) for old, new in zip(ds.dims, ["bands", "y", "x"]))) + ds.coords["bands"] = ["R", "G", "B"] - w.save_datasets([ds], sector_id='TEST', source_name="TESTS", tile_count=(3, 3)) - chan_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*test_ds_R*.nc')) + w.save_datasets([ds], sector_id="TEST", source_name="TESTS", tile_count=(3, 3)) + chan_files = glob(os.path.join(str(tmp_path), "TESTS_AII*test_ds_R*.nc")) all_files = chan_files[:] assert len(chan_files) == 9 - chan_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*test_ds_G*.nc')) + chan_files = glob(os.path.join(str(tmp_path), "TESTS_AII*test_ds_G*.nc")) all_files.extend(chan_files) assert len(chan_files) == 9 - chan_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*test_ds_B*.nc')) + chan_files = glob(os.path.join(str(tmp_path), "TESTS_AII*test_ds_B*.nc")) assert len(chan_files) == 9 all_files.extend(chan_files) for fn in all_files: @@ -449,54 +449,54 @@ def test_basic_numbered_tiles_rgb(self, tmp_path): @pytest.mark.parametrize( "sector", - ['C', - 'F'] + ["C", + "F"] ) @pytest.mark.parametrize( "extra_kwargs", [ {}, - {'environment_prefix': 'AA'}, - {'environment_prefix': 'BB', 'filename': '{environment_prefix}_{name}_GLM_T{tile_number:04d}.nc'}, + {"environment_prefix": "AA"}, + {"environment_prefix": "BB", "filename": "{environment_prefix}_{name}_GLM_T{tile_number:04d}.nc"}, ] ) def test_multivar_numbered_tiles_glm(self, sector, extra_kwargs, tmp_path): """Test creating a tiles with multiple variables.""" from satpy.writers.awips_tiled import AWIPSTiledWriter - os.environ['ORGANIZATION'] = '1' * 50 + os.environ["ORGANIZATION"] = "1" * 50 w = AWIPSTiledWriter(base_dir=tmp_path, compress=True) data = _get_test_data() area_def = _get_test_area() ds1 = _get_test_lcc_data(data, area_def) ds1.attrs.update( dict( - name='total_energy', - platform_name='GOES-17', - sensor='SENSOR', - units='1', - scan_mode='M3', + name="total_energy", + platform_name="GOES-17", + sensor="SENSOR", + units="1", + scan_mode="M3", scene_abbr=sector, platform_shortname="G17" ) ) ds2 = ds1.copy() ds2.attrs.update({ - 'name': 'flash_extent_density', + "name": "flash_extent_density", }) ds3 = ds1.copy() ds3.attrs.update({ - 'name': 'average_flash_area', + "name": "average_flash_area", }) dqf = ds1.copy() dqf = (dqf * 255).astype(np.uint8) dqf.attrs = ds1.attrs.copy() dqf.attrs.update({ - 'name': 'DQF', - '_FillValue': 1, + "name": "DQF", + "_FillValue": 1, }) - w.save_datasets([ds1, ds2, ds3, dqf], sector_id='TEST', source_name="TESTS", - tile_count=(3, 3), template='glm_l2_rad{}'.format(sector.lower()), + w.save_datasets([ds1, ds2, ds3, dqf], sector_id="TEST", 
source_name="TESTS", + tile_count=(3, 3), template="glm_l2_rad{}".format(sector.lower()), **extra_kwargs) fn_glob = self._get_glm_glob_filename(extra_kwargs) all_files = glob(os.path.join(str(tmp_path), fn_glob)) @@ -505,15 +505,15 @@ def test_multivar_numbered_tiles_glm(self, sector, extra_kwargs, tmp_path): unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - if sector == 'C': - assert masked_ds.attrs['time_coverage_end'] == END_TIME.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + if sector == "C": + assert masked_ds.attrs["time_coverage_end"] == END_TIME.strftime("%Y-%m-%dT%H:%M:%S.%fZ") else: # 'F' - assert masked_ds.attrs['time_coverage_end'] == END_TIME.strftime('%Y-%m-%dT%H:%M:%SZ') + assert masked_ds.attrs["time_coverage_end"] == END_TIME.strftime("%Y-%m-%dT%H:%M:%SZ") @staticmethod def _get_glm_glob_filename(extra_kwargs): - if 'filename' in extra_kwargs: - return 'BB*_GLM*.nc' - elif 'environment_prefix' in extra_kwargs: - return 'AA*_GLM*.nc' - return 'DR*_GLM*.nc' + if "filename" in extra_kwargs: + return "BB*_GLM*.nc" + elif "environment_prefix" in extra_kwargs: + return "AA*_GLM*.nc" + return "DR*_GLM*.nc" diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 7fdcaeb553..18b5947eb6 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -70,7 +70,7 @@ def __exit__(self, *args): def test_lonlat_storage(tmp_path): """Test correct storage for area with lon/lat units.""" - from ..utils import make_fake_scene + from satpy.tests.utils import make_fake_scene scn = make_fake_scene( {"ketolysis": np.arange(25).reshape(5, 5)}, daskify=True, @@ -139,11 +139,11 @@ def test_preprocess_dataarray_name(): from satpy.writers.cf_writer import _preprocess_dataarray_name scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) - dataarray = scn['1'] + scn["1"] = xr.DataArray([1, 2, 3]) + dataarray = scn["1"] # If numeric_name_prefix is a string, test add the original_name attributes out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="TEST", include_orig_name=True) - assert out_da.attrs['original_name'] == '1' + assert out_da.attrs["original_name"] == "1" # If numeric_name_prefix is empty string, False or None, test do not add original_name attributes out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="", include_orig_name=True) @@ -163,16 +163,16 @@ def test_add_time_cf_attrs(): scn = Scene() test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) - times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', - '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64) - scn['test-array'] = xr.DataArray(test_array, - dims=['y', 'x'], - coords={'time': ('y', times)}, + times = np.array(["2018-05-30T10:05:00", "2018-05-30T10:05:01", + "2018-05-30T10:05:02", "2018-05-30T10:05:03"], dtype=np.datetime64) + scn["test-array"] = xr.DataArray(test_array, + dims=["y", "x"], + coords={"time": ("y", times)}, attrs=dict(start_time=times[0], end_time=times[-1])) - ds = scn['test-array'].to_dataset(name='test-array') + ds = scn["test-array"].to_dataset(name="test-array") ds = add_time_bounds_dimension(ds) assert "bnds_1d" in ds.dims - assert ds.dims['bnds_1d'] == 2 + assert ds.dims["bnds_1d"] == 2 assert "time_bnds" in list(ds.data_vars) assert "bounds" in ds["time"].attrs assert "standard_name" in ds["time"].attrs @@ -194,23 +194,23 @@ def test_init(self): from satpy.writers import configs_for_writer 
from satpy.writers.cf_writer import CFWriter - CFWriter(config_files=list(configs_for_writer('cf'))[0]) + CFWriter(config_files=list(configs_for_writer("cf"))[0]) def test_save_array(self): """Test saving an array to netcdf/cf.""" scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) - scn['test-array'] = xr.DataArray([1, 2, 3], + scn["test-array"] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, - prerequisites=[make_dsq(name='hej')])) + prerequisites=[make_dsq(name="hej")])) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['test-array'][:], [1, 2, 3]) + np.testing.assert_array_equal(f["test-array"][:], [1, 2, 3]) expected_prereq = ("DataQuery(name='hej')") - assert f['test-array'].attrs['prerequisites'] == expected_prereq + assert f["test-array"].attrs["prerequisites"] == expected_prereq def test_save_array_coords(self): """Test saving array with coordinates.""" @@ -218,69 +218,69 @@ def test_save_array_coords(self): start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) coords = { - 'x': np.arange(3), - 'y': np.arange(1), + "x": np.arange(3), + "y": np.arange(1), } if CRS is not None: - proj_str = ('+proj=geos +lon_0=-95.0 +h=35786023.0 ' - '+a=6378137.0 +b=6356752.31414 +sweep=x ' - '+units=m +no_defs') - coords['crs'] = CRS.from_string(proj_str) - scn['test-array'] = xr.DataArray([[1, 2, 3]], - dims=('y', 'x'), + proj_str = ("+proj=geos +lon_0=-95.0 +h=35786023.0 " + "+a=6378137.0 +b=6356752.31414 +sweep=x " + "+units=m +no_defs") + coords["crs"] = CRS.from_string(proj_str) + scn["test-array"] = xr.DataArray([[1, 2, 3]], + dims=("y", "x"), coords=coords, attrs=dict(start_time=start_time, end_time=end_time, - prerequisites=[make_dsq(name='hej')])) + prerequisites=[make_dsq(name="hej")])) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['test-array'][:], [[1, 2, 3]]) - np.testing.assert_array_equal(f['x'][:], [0, 1, 2]) - np.testing.assert_array_equal(f['y'][:], [0]) - assert 'crs' not in f - assert '_FillValue' not in f['x'].attrs - assert '_FillValue' not in f['y'].attrs + np.testing.assert_array_equal(f["test-array"][:], [[1, 2, 3]]) + np.testing.assert_array_equal(f["x"][:], [0, 1, 2]) + np.testing.assert_array_equal(f["y"][:], [0]) + assert "crs" not in f + assert "_FillValue" not in f["x"].attrs + assert "_FillValue" not in f["y"].attrs expected_prereq = ("DataQuery(name='hej')") - assert f['test-array'].attrs['prerequisites'] == expected_prereq + assert f["test-array"].attrs["prerequisites"] == expected_prereq def test_save_dataset_a_digit(self): """Test saving an array to netcdf/cf where dataset name starting with a digit.""" scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) + scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['CHANNEL_1'][:], [1, 2, 3]) + np.testing.assert_array_equal(f["CHANNEL_1"][:], [1, 2, 3]) def test_save_dataset_a_digit_prefix(self): """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix.""" scn = Scene() - scn['1'] = 
xr.DataArray([1, 2, 3]) + scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', numeric_name_prefix='TEST') + scn.save_datasets(filename=filename, writer="cf", numeric_name_prefix="TEST") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['TEST1'][:], [1, 2, 3]) + np.testing.assert_array_equal(f["TEST1"][:], [1, 2, 3]) def test_save_dataset_a_digit_prefix_include_attr(self): """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name.""" scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) + scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', include_orig_name=True, numeric_name_prefix='TEST') + scn.save_datasets(filename=filename, writer="cf", include_orig_name=True, numeric_name_prefix="TEST") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['TEST1'][:], [1, 2, 3]) - assert f['TEST1'].attrs['original_name'] == '1' + np.testing.assert_array_equal(f["TEST1"][:], [1, 2, 3]) + assert f["TEST1"].attrs["original_name"] == "1" def test_save_dataset_a_digit_no_prefix_include_attr(self): """Test saving an array to netcdf/cf dataset name starting with a digit with no prefix include orig name.""" scn = Scene() - scn['1'] = xr.DataArray([1, 2, 3]) + scn["1"] = xr.DataArray([1, 2, 3]) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', include_orig_name=True, numeric_name_prefix='') + scn.save_datasets(filename=filename, writer="cf", include_orig_name=True, numeric_name_prefix="") with xr.open_dataset(filename) as f: - np.testing.assert_array_equal(f['1'][:], [1, 2, 3]) - assert 'original_name' not in f['1'].attrs + np.testing.assert_array_equal(f["1"][:], [1, 2, 3]) + assert "original_name" not in f["1"].attrs def test_ancillary_variables(self): """Test ancillary_variables cited each other.""" @@ -291,16 +291,16 @@ def test_ancillary_variables(self): da = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time, - prerequisites=[make_dataid(name='hej')])) - scn['test-array-1'] = da - scn['test-array-2'] = da.copy() - scn['test-array-1'].attrs['ancillary_variables'] = [scn['test-array-2']] - scn['test-array-2'].attrs['ancillary_variables'] = [scn['test-array-1']] + prerequisites=[make_dataid(name="hej")])) + scn["test-array-1"] = da + scn["test-array-2"] = da.copy() + scn["test-array-1"].attrs["ancillary_variables"] = [scn["test-array-2"]] + scn["test-array-2"].attrs["ancillary_variables"] = [scn["test-array-1"]] with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: - assert f['test-array-1'].attrs['ancillary_variables'] == 'test-array-2' - assert f['test-array-2'].attrs['ancillary_variables'] == 'test-array-1' + assert f["test-array-1"].attrs["ancillary_variables"] == "test-array-2" + assert f["test-array-2"].attrs["ancillary_variables"] == "test-array-1" def test_groups(self): """Test creating a file with groups.""" @@ -319,34 +319,34 @@ def test_groups(self): time_hrv = [1, 2, 3] scn = Scene() - scn['VIS006'] = xr.DataArray(data_visir, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)}, - attrs={'name': 'VIS006', 'start_time': tstart, 'end_time': tend}) - scn['IR_108'] = xr.DataArray(data_visir, - dims=('y', 'x'), - coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_ir_108)}, - 
attrs={'name': 'IR_108', 'start_time': tstart, 'end_time': tend}) - scn['HRV'] = xr.DataArray(data_hrv, - dims=('y', 'x'), - coords={'y': y_hrv, 'x': x_hrv, 'acq_time': ('y', time_hrv)}, - attrs={'name': 'HRV', 'start_time': tstart, 'end_time': tend}) + scn["VIS006"] = xr.DataArray(data_visir, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_vis006)}, + attrs={"name": "VIS006", "start_time": tstart, "end_time": tend}) + scn["IR_108"] = xr.DataArray(data_visir, + dims=("y", "x"), + coords={"y": y_visir, "x": x_visir, "acq_time": ("y", time_ir_108)}, + attrs={"name": "IR_108", "start_time": tstart, "end_time": tend}) + scn["HRV"] = xr.DataArray(data_hrv, + dims=("y", "x"), + coords={"y": y_hrv, "x": x_hrv, "acq_time": ("y", time_hrv)}, + attrs={"name": "HRV", "start_time": tstart, "end_time": tend}) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', groups={'visir': ['IR_108', 'VIS006'], 'hrv': ['HRV']}, + scn.save_datasets(filename=filename, writer="cf", groups={"visir": ["IR_108", "VIS006"], "hrv": ["HRV"]}, pretty=True) nc_root = xr.open_dataset(filename) - assert 'history' in nc_root.attrs + assert "history" in nc_root.attrs assert set(nc_root.variables.keys()) == set() - nc_visir = xr.open_dataset(filename, group='visir') - nc_hrv = xr.open_dataset(filename, group='hrv') - assert set(nc_visir.variables.keys()) == {'VIS006', 'IR_108', - 'y', 'x', 'VIS006_acq_time', 'IR_108_acq_time'} - assert set(nc_hrv.variables.keys()) == {'HRV', 'y', 'x', 'acq_time'} - for tst, ref in zip([nc_visir['VIS006'], nc_visir['IR_108'], nc_hrv['HRV']], - [scn['VIS006'], scn['IR_108'], scn['HRV']]): + nc_visir = xr.open_dataset(filename, group="visir") + nc_hrv = xr.open_dataset(filename, group="hrv") + assert set(nc_visir.variables.keys()) == {"VIS006", "IR_108", + "y", "x", "VIS006_acq_time", "IR_108_acq_time"} + assert set(nc_hrv.variables.keys()) == {"HRV", "y", "x", "acq_time"} + for tst, ref in zip([nc_visir["VIS006"], nc_visir["IR_108"], nc_hrv["HRV"]], + [scn["VIS006"], scn["IR_108"], scn["HRV"]]): np.testing.assert_array_equal(tst.data, ref.data) nc_root.close() nc_visir.close() @@ -354,8 +354,8 @@ def test_groups(self): # Different projection coordinates in one group are not supported with TempFile() as filename: - with pytest.raises(ValueError): - scn.save_datasets(datasets=['VIS006', 'HRV'], filename=filename, writer='cf') + with pytest.raises(ValueError, match="Datasets .* must have identical projection coordinates..*"): + scn.save_datasets(datasets=["VIS006", "HRV"], filename=filename, writer="cf") def test_single_time_value(self): """Test setting a single time value.""" @@ -363,32 +363,32 @@ def test_single_time_value(self): start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) - scn['test-array'] = xr.DataArray(test_array, - dims=['x', 'y'], - coords={'time': np.datetime64('2018-05-30T10:05:00')}, + scn["test-array"] = xr.DataArray(test_array, + dims=["x", "y"], + coords={"time": np.datetime64("2018-05-30T10:05:00")}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename, decode_cf=True) as f: - np.testing.assert_array_equal(f['time'], scn['test-array']['time']) - bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]') - np.testing.assert_array_equal(f['time_bnds'], bounds_exp) + 
np.testing.assert_array_equal(f["time"], scn["test-array"]["time"]) + bounds_exp = np.array([[start_time, end_time]], dtype="datetime64[m]") + np.testing.assert_array_equal(f["time_bnds"], bounds_exp) def test_time_coordinate_on_a_swath(self): """Test that time dimension is not added on swath data with time already as a coordinate.""" scn = Scene() test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) - times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', - '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64) - scn['test-array'] = xr.DataArray(test_array, - dims=['y', 'x'], - coords={'time': ('y', times)}, + times = np.array(["2018-05-30T10:05:00", "2018-05-30T10:05:01", + "2018-05-30T10:05:02", "2018-05-30T10:05:03"], dtype=np.datetime64) + scn["test-array"] = xr.DataArray(test_array, + dims=["y", "x"], + coords={"time": ("y", times)}, attrs=dict(start_time=times[0], end_time=times[-1])) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', pretty=True) + scn.save_datasets(filename=filename, writer="cf", pretty=True) with xr.open_dataset(filename, decode_cf=True) as f: - np.testing.assert_array_equal(f['time'], scn['test-array']['time']) + np.testing.assert_array_equal(f["time"], scn["test-array"]["time"]) def test_bounds(self): """Test setting time bounds.""" @@ -396,30 +396,30 @@ def test_bounds(self): start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) - scn['test-array'] = xr.DataArray(test_array, - dims=['x', 'y', 'time'], - coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, + scn["test-array"] = xr.DataArray(test_array, + dims=["x", "y", "time"], + coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") # Check decoded time coordinates & bounds with xr.open_dataset(filename, decode_cf=True) as f: - bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]') - np.testing.assert_array_equal(f['time_bnds'], bounds_exp) - assert f['time'].attrs['bounds'] == 'time_bnds' + bounds_exp = np.array([[start_time, end_time]], dtype="datetime64[m]") + np.testing.assert_array_equal(f["time_bnds"], bounds_exp) + assert f["time"].attrs["bounds"] == "time_bnds" # Check raw time coordinates & bounds with xr.open_dataset(filename, decode_cf=False) as f: - np.testing.assert_almost_equal(f['time_bnds'], [[-0.0034722, 0.0069444]]) + np.testing.assert_almost_equal(f["time_bnds"], [[-0.0034722, 0.0069444]]) # User-specified time encoding should have preference with TempFile() as filename: - time_units = 'seconds since 2018-01-01' - scn.save_datasets(filename=filename, encoding={'time': {'units': time_units}}, - writer='cf') + time_units = "seconds since 2018-01-01" + scn.save_datasets(filename=filename, encoding={"time": {"units": time_units}}, + writer="cf") with xr.open_dataset(filename, decode_cf=False) as f: - np.testing.assert_array_equal(f['time_bnds'], [[12909600, 12910500]]) + np.testing.assert_array_equal(f["time_bnds"], [[12909600, 12910500]]) def test_bounds_minimum(self): """Test minimum bounds.""" @@ -430,21 +430,21 @@ def test_bounds_minimum(self): end_timeB = datetime(2018, 5, 30, 10, 15) # expected to be used test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) - scn['test-arrayA'] = 
xr.DataArray(test_arrayA, - dims=['x', 'y', 'time'], - coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, + scn["test-arrayA"] = xr.DataArray(test_arrayA, + dims=["x", "y", "time"], + coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) - scn['test-arrayB'] = xr.DataArray(test_arrayB, - dims=['x', 'y', 'time'], - coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, + scn["test-arrayB"] = xr.DataArray(test_arrayB, + dims=["x", "y", "time"], + coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, attrs=dict(start_time=start_timeB, end_time=end_timeB)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename, decode_cf=True) as f: - bounds_exp = np.array([[start_timeA, end_timeB]], dtype='datetime64[m]') - np.testing.assert_array_equal(f['time_bnds'], bounds_exp) + bounds_exp = np.array([[start_timeA, end_timeB]], dtype="datetime64[m]") + np.testing.assert_array_equal(f["time_bnds"], bounds_exp) def test_bounds_missing_time_info(self): """Test time bounds generation in case of missing time.""" @@ -453,19 +453,19 @@ def test_bounds_missing_time_info(self): end_timeA = datetime(2018, 5, 30, 10, 15) test_arrayA = np.array([[1, 2], [3, 4]]).reshape(2, 2, 1) test_arrayB = np.array([[1, 2], [3, 5]]).reshape(2, 2, 1) - scn['test-arrayA'] = xr.DataArray(test_arrayA, - dims=['x', 'y', 'time'], - coords={'time': [np.datetime64('2018-05-30T10:05:00')]}, + scn["test-arrayA"] = xr.DataArray(test_arrayA, + dims=["x", "y", "time"], + coords={"time": [np.datetime64("2018-05-30T10:05:00")]}, attrs=dict(start_time=start_timeA, end_time=end_timeA)) - scn['test-arrayB'] = xr.DataArray(test_arrayB, - dims=['x', 'y', 'time'], - coords={'time': [np.datetime64('2018-05-30T10:05:00')]}) + scn["test-arrayB"] = xr.DataArray(test_arrayB, + dims=["x", "y", "time"], + coords={"time": [np.datetime64("2018-05-30T10:05:00")]}) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename, decode_cf=True) as f: - bounds_exp = np.array([[start_timeA, end_timeA]], dtype='datetime64[m]') - np.testing.assert_array_equal(f['time_bnds'], bounds_exp) + bounds_exp = np.array([[start_timeA, end_timeA]], dtype="datetime64[m]") + np.testing.assert_array_equal(f["time_bnds"], bounds_exp) def test_unlimited_dims_kwarg(self): """Test specification of unlimited dimensions.""" @@ -473,51 +473,51 @@ def test_unlimited_dims_kwarg(self): start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) test_array = np.array([[1, 2], [3, 4]]) - scn['test-array'] = xr.DataArray(test_array, - dims=['x', 'y'], - coords={'time': np.datetime64('2018-05-30T10:05:00')}, + scn["test-array"] = xr.DataArray(test_array, + dims=["x", "y"], + coords={"time": np.datetime64("2018-05-30T10:05:00")}, attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', unlimited_dims=['time']) + scn.save_datasets(filename=filename, writer="cf", unlimited_dims=["time"]) with xr.open_dataset(filename) as f: - assert set(f.encoding['unlimited_dims']) == {'time'} + assert set(f.encoding["unlimited_dims"]) == {"time"} def test_header_attrs(self): """Check global attributes are set.""" scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) - scn['test-array'] = 
xr.DataArray([1, 2, 3], + scn["test-array"] = xr.DataArray([1, 2, 3], attrs=dict(start_time=start_time, end_time=end_time)) with TempFile() as filename: - header_attrs = {'sensor': 'SEVIRI', - 'orbit': 99999, - 'none': None, - 'list': [1, 2, 3], - 'set': {1, 2, 3}, - 'dict': {'a': 1, 'b': 2}, - 'nested': {'outer': {'inner1': 1, 'inner2': 2}}, - 'bool': True, - 'bool_': np.bool_(True)} + header_attrs = {"sensor": "SEVIRI", + "orbit": 99999, + "none": None, + "list": [1, 2, 3], + "set": {1, 2, 3}, + "dict": {"a": 1, "b": 2}, + "nested": {"outer": {"inner1": 1, "inner2": 2}}, + "bool": True, + "bool_": np.bool_(True)} scn.save_datasets(filename=filename, header_attrs=header_attrs, flatten_attrs=True, - writer='cf') + writer="cf") with xr.open_dataset(filename) as f: - assert 'history' in f.attrs - assert f.attrs['sensor'] == 'SEVIRI' - assert f.attrs['orbit'] == 99999 - np.testing.assert_array_equal(f.attrs['list'], [1, 2, 3]) - assert f.attrs['set'] == '{1, 2, 3}' - assert f.attrs['dict_a'] == 1 - assert f.attrs['dict_b'] == 2 - assert f.attrs['nested_outer_inner1'] == 1 - assert f.attrs['nested_outer_inner2'] == 2 - assert f.attrs['bool'] == 'true' - assert f.attrs['bool_'] == 'true' - assert 'none' not in f.attrs.keys() + assert "history" in f.attrs + assert f.attrs["sensor"] == "SEVIRI" + assert f.attrs["orbit"] == 99999 + np.testing.assert_array_equal(f.attrs["list"], [1, 2, 3]) + assert f.attrs["set"] == "{1, 2, 3}" + assert f.attrs["dict_a"] == 1 + assert f.attrs["dict_b"] == 2 + assert f.attrs["nested_outer_inner1"] == 1 + assert f.attrs["nested_outer_inner2"] == 2 + assert f.attrs["bool"] == "true" + assert f.attrs["bool_"] == "true" + assert "none" not in f.attrs.keys() def get_test_attrs(self): """Create some dataset attributes for testing purpose. 
@@ -526,79 +526,79 @@ def get_test_attrs(self): Attributes, encoded attributes, encoded and flattened attributes """ - attrs = {'name': 'IR_108', - 'start_time': datetime(2018, 1, 1, 0), - 'end_time': datetime(2018, 1, 1, 0, 15), - 'int': 1, - 'float': 1.0, - 'none': None, # should be dropped - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': True, - 'numpy_void': np.void(0), - 'numpy_bytes': np.bytes_('test'), - 'numpy_string': np.str_('test'), - 'list': [1, 2, np.float64(3)], - 'nested_list': ["1", ["2", [3]]], - 'bool': True, - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': np.array([True, False, True]), - 'array_2d': np.array([[1, 2], [3, 4]]), - 'array_3d': np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), - 'dict': {'a': 1, 'b': 2}, - 'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}}, - 'raw_metadata': OrderedDict([ - ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])), - ('flag', np.bool_(True)), - ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))])) + attrs = {"name": "IR_108", + "start_time": datetime(2018, 1, 1, 0), + "end_time": datetime(2018, 1, 1, 0, 15), + "int": 1, + "float": 1.0, + "none": None, # should be dropped + "numpy_int": np.uint8(1), + "numpy_float": np.float32(1), + "numpy_bool": True, + "numpy_void": np.void(0), + "numpy_bytes": np.bytes_("test"), + "numpy_string": np.str_("test"), + "list": [1, 2, np.float64(3)], + "nested_list": ["1", ["2", [3]]], + "bool": True, + "array": np.array([1, 2, 3], dtype="uint8"), + "array_bool": np.array([True, False, True]), + "array_2d": np.array([[1, 2], [3, 4]]), + "array_3d": np.array([[[1, 2], [3, 4]], [[1, 2], [3, 4]]]), + "dict": {"a": 1, "b": 2}, + "nested_dict": {"l1": {"l2": {"l3": np.array([1, 2, 3], dtype="uint8")}}}, + "raw_metadata": OrderedDict([ + ("recarray", np.zeros(3, dtype=[("x", "i4"), ("y", "u1")])), + ("flag", np.bool_(True)), + ("dict", OrderedDict([("a", 1), ("b", np.array([1, 2, 3], dtype="uint8"))])) ])} - encoded = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict': '{"a": 1, "b": 2}', - 'nested_dict': '{"l1": {"l2": {"l3": [1, 2, 3]}}}', - 'raw_metadata': '{"recarray": [[0, 0], [0, 0], [0, 0]], ' + encoded = {"name": "IR_108", + "start_time": "2018-01-01 00:00:00", + "end_time": "2018-01-01 00:15:00", + "int": 1, + "float": 1.0, + "numpy_int": np.uint8(1), + "numpy_float": np.float32(1), + "numpy_bool": "true", + "numpy_void": "[]", + "numpy_bytes": "test", + "numpy_string": "test", + "list": [1, 2, np.float64(3)], + "nested_list": '["1", ["2", [3]]]', + "bool": "true", + "array": np.array([1, 2, 3], dtype="uint8"), + "array_bool": ["true", "false", "true"], + "array_2d": "[[1, 2], [3, 4]]", + "array_3d": "[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]", + "dict": '{"a": 1, "b": 2}', + "nested_dict": '{"l1": {"l2": {"l3": [1, 2, 3]}}}', + "raw_metadata": '{"recarray": [[0, 0], [0, 0], [0, 0]], ' '"flag": "true", "dict": {"a": 1, "b": [1, 2, 3]}}'} - encoded_flat = {'name': 'IR_108', - 'start_time': '2018-01-01 00:00:00', - 
'end_time': '2018-01-01 00:15:00', - 'int': 1, - 'float': 1.0, - 'numpy_int': np.uint8(1), - 'numpy_float': np.float32(1), - 'numpy_bool': 'true', - 'numpy_void': '[]', - 'numpy_bytes': 'test', - 'numpy_string': 'test', - 'list': [1, 2, np.float64(3)], - 'nested_list': '["1", ["2", [3]]]', - 'bool': 'true', - 'array': np.array([1, 2, 3], dtype='uint8'), - 'array_bool': ['true', 'false', 'true'], - 'array_2d': '[[1, 2], [3, 4]]', - 'array_3d': '[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]', - 'dict_a': 1, - 'dict_b': 2, - 'nested_dict_l1_l2_l3': np.array([1, 2, 3], dtype='uint8'), - 'raw_metadata_recarray': '[[0, 0], [0, 0], [0, 0]]', - 'raw_metadata_flag': 'true', - 'raw_metadata_dict_a': 1, - 'raw_metadata_dict_b': np.array([1, 2, 3], dtype='uint8')} + encoded_flat = {"name": "IR_108", + "start_time": "2018-01-01 00:00:00", + "end_time": "2018-01-01 00:15:00", + "int": 1, + "float": 1.0, + "numpy_int": np.uint8(1), + "numpy_float": np.float32(1), + "numpy_bool": "true", + "numpy_void": "[]", + "numpy_bytes": "test", + "numpy_string": "test", + "list": [1, 2, np.float64(3)], + "nested_list": '["1", ["2", [3]]]', + "bool": "true", + "array": np.array([1, 2, 3], dtype="uint8"), + "array_bool": ["true", "false", "true"], + "array_2d": "[[1, 2], [3, 4]]", + "array_3d": "[[[1, 2], [3, 4]], [[1, 2], [3, 4]]]", + "dict_a": 1, + "dict_b": 2, + "nested_dict_l1_l2_l3": np.array([1, 2, 3], dtype="uint8"), + "raw_metadata_recarray": "[[0, 0], [0, 0], [0, 0]]", + "raw_metadata_flag": "true", + "raw_metadata_dict_a": 1, + "raw_metadata_dict_b": np.array([1, 2, 3], dtype="uint8")} return attrs, encoded, encoded_flat def assertDictWithArraysEqual(self, d1, d2): @@ -626,13 +626,13 @@ def test_encode_attrs_nc(self): self.assertDictWithArraysEqual(expected, encoded) # Test decoding of json-encoded attributes - raw_md_roundtrip = {'recarray': [[0, 0], [0, 0], [0, 0]], - 'flag': 'true', - 'dict': {'a': 1, 'b': [1, 2, 3]}} - assert json.loads(encoded['raw_metadata']) == raw_md_roundtrip - assert json.loads(encoded['array_3d']) == [[[1, 2], [3, 4]], [[1, 2], [3, 4]]] - assert json.loads(encoded['nested_dict']) == {"l1": {"l2": {"l3": [1, 2, 3]}}} - assert json.loads(encoded['nested_list']) == ["1", ["2", [3]]] + raw_md_roundtrip = {"recarray": [[0, 0], [0, 0], [0, 0]], + "flag": "true", + "dict": {"a": 1, "b": [1, 2, 3]}} + assert json.loads(encoded["raw_metadata"]) == raw_md_roundtrip + assert json.loads(encoded["array_3d"]) == [[[1, 2], [3, 4]], [[1, 2], [3, 4]]] + assert json.loads(encoded["nested_dict"]) == {"l1": {"l2": {"l3": [1, 2, 3]}}} + assert json.loads(encoded["nested_list"]) == ["1", ["2", [3]]] def test_da2cf(self): """Test the conversion of a DataArray to a CF-compatible DataArray.""" @@ -640,44 +640,44 @@ def test_da2cf(self): # Create set of test attributes attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs() - attrs['area'] = 'some_area' - attrs['prerequisites'] = [make_dsq(name='hej')] - attrs['_satpy_id_name'] = 'myname' + attrs["area"] = "some_area" + attrs["prerequisites"] = [make_dsq(name="hej")] + attrs["_satpy_id_name"] = "myname" # Adjust expected attributes expected_prereq = ("DataQuery(name='hej')") - update = {'prerequisites': [expected_prereq], 'long_name': attrs['name']} + update = {"prerequisites": [expected_prereq], "long_name": attrs["name"]} attrs_expected.update(update) attrs_expected_flat.update(update) - attrs_expected.pop('name') - attrs_expected_flat.pop('name') + attrs_expected.pop("name") + attrs_expected_flat.pop("name") # Create test data array - arr = 
xr.DataArray(np.array([[1, 2], [3, 4]]), attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [1, 2], 'acq_time': ('y', [3, 4])}) + arr = xr.DataArray(np.array([[1, 2], [3, 4]]), attrs=attrs, dims=("y", "x"), + coords={"y": [0, 1], "x": [1, 2], "acq_time": ("y", [3, 4])}) # Test conversion to something cf-compliant res = CFWriter.da2cf(arr) - np.testing.assert_array_equal(res['x'], arr['x']) - np.testing.assert_array_equal(res['y'], arr['y']) - np.testing.assert_array_equal(res['acq_time'], arr['acq_time']) - assert res['x'].attrs == {'units': 'm', 'standard_name': 'projection_x_coordinate'} - assert res['y'].attrs == {'units': 'm', 'standard_name': 'projection_y_coordinate'} + np.testing.assert_array_equal(res["x"], arr["x"]) + np.testing.assert_array_equal(res["y"], arr["y"]) + np.testing.assert_array_equal(res["acq_time"], arr["acq_time"]) + assert res["x"].attrs == {"units": "m", "standard_name": "projection_x_coordinate"} + assert res["y"].attrs == {"units": "m", "standard_name": "projection_y_coordinate"} self.assertDictWithArraysEqual(res.attrs, attrs_expected) # Test attribute kwargs - res_flat = CFWriter.da2cf(arr, flatten_attrs=True, exclude_attrs=['int']) - attrs_expected_flat.pop('int') + res_flat = CFWriter.da2cf(arr, flatten_attrs=True, exclude_attrs=["int"]) + attrs_expected_flat.pop("int") self.assertDictWithArraysEqual(res_flat.attrs, attrs_expected_flat) def test_da2cf_one_dimensional_array(self): """Test the conversion of an 1d DataArray to a CF-compatible DataArray.""" from satpy.writers.cf_writer import CFWriter - arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=('y',), - coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])}) + arr = xr.DataArray(np.array([1, 2, 3, 4]), attrs={}, dims=("y",), + coords={"y": [0, 1, 2, 3], "acq_time": ("y", [0, 1, 2, 3])}) _ = CFWriter.da2cf(arr) def test_collect_cf_dataarrays(self): @@ -685,10 +685,10 @@ def test_collect_cf_dataarrays(self): from satpy.writers.cf_writer import _collect_cf_dataset geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) @@ -699,39 +699,39 @@ def test_collect_cf_dataarrays(self): time = [1, 2] tstart = datetime(2019, 4, 1, 12, 0) tend = datetime(2019, 4, 1, 12, 15) - list_dataarrays = [xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, - attrs={'name': 'var1', 'start_time': tstart, 'end_time': tend, 'area': geos}), - xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)}, - attrs={'name': 'var2', 'long_name': 'variable 2'})] + list_dataarrays = [xr.DataArray(data=data, dims=("y", "x"), coords={"y": y, "x": x, "acq_time": ("y", time)}, + attrs={"name": "var1", "start_time": tstart, "end_time": tend, "area": geos}), + xr.DataArray(data=data, dims=("y", "x"), coords={"y": y, "x": x, "acq_time": ("y", time)}, + attrs={"name": "var2", "long_name": "variable 2"})] # Collect datasets ds = _collect_cf_dataset(list_dataarrays, include_lonlats=True) # Test results assert len(ds.keys()) == 3 - assert set(ds.keys()) == {'var1', 'var2', 'geos'} + assert set(ds.keys()) == {"var1", "var2", "geos"} - da_var1 = ds['var1'] - da_var2 = ds['var2'] - assert da_var1.name == 'var1' - assert da_var1.attrs['grid_mapping'] == 'geos' - 
assert da_var1.attrs['long_name'] == 'var1' + da_var1 = ds["var1"] + da_var2 = ds["var2"] + assert da_var1.name == "var1" + assert da_var1.attrs["grid_mapping"] == "geos" + assert da_var1.attrs["long_name"] == "var1" # variable 2 - assert 'grid_mapping' not in da_var2.attrs - assert da_var2.attrs['long_name'] == 'variable 2' + assert "grid_mapping" not in da_var2.attrs + assert da_var2.attrs["long_name"] == "variable 2" def test_assert_xy_unique(self): """Test that the x and y coordinates are unique.""" from satpy.writers.cf_writer import assert_xy_unique dummy = [[1, 2], [3, 4]] - datas = {'a': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), - 'b': xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}), - 'n': xr.DataArray(data=dummy, dims=('v', 'w'), coords={'v': [1, 2], 'w': [3, 4]})} + datas = {"a": xr.DataArray(data=dummy, dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}), + "b": xr.DataArray(data=dummy, dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}), + "n": xr.DataArray(data=dummy, dims=("v", "w"), coords={"v": [1, 2], "w": [3, 4]})} assert_xy_unique(datas) - datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]}) - with pytest.raises(ValueError): + datas["c"] = xr.DataArray(data=dummy, dims=("y", "x"), coords={"y": [1, 3], "x": [3, 4]}) + with pytest.raises(ValueError, match="Datasets .* must have identical projection coordinates..*"): assert_xy_unique(datas) def test_link_coords(self): @@ -743,31 +743,31 @@ def test_link_coords(self): lon2 = np.zeros((1, 2, 2)) lat = np.ones((2, 2)) datasets = { - 'var1': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon lat'}), - 'var2': xr.DataArray(data=data, dims=('y', 'x')), - 'var3': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'lon2 lat'}), - 'var4': xr.DataArray(data=data, dims=('y', 'x'), attrs={'coordinates': 'not_exist lon lat'}), - 'lon': xr.DataArray(data=lon, dims=('y', 'x')), - 'lon2': xr.DataArray(data=lon2, dims=('time', 'y', 'x')), - 'lat': xr.DataArray(data=lat, dims=('y', 'x')) + "var1": xr.DataArray(data=data, dims=("y", "x"), attrs={"coordinates": "lon lat"}), + "var2": xr.DataArray(data=data, dims=("y", "x")), + "var3": xr.DataArray(data=data, dims=("y", "x"), attrs={"coordinates": "lon2 lat"}), + "var4": xr.DataArray(data=data, dims=("y", "x"), attrs={"coordinates": "not_exist lon lat"}), + "lon": xr.DataArray(data=lon, dims=("y", "x")), + "lon2": xr.DataArray(data=lon2, dims=("time", "y", "x")), + "lat": xr.DataArray(data=lat, dims=("y", "x")) } link_coords(datasets) # Check that link has been established correctly and 'coordinate' atrribute has been dropped - assert 'lon' in datasets['var1'].coords - assert 'lat' in datasets['var1'].coords - np.testing.assert_array_equal(datasets['var1']['lon'].data, lon) - np.testing.assert_array_equal(datasets['var1']['lat'].data, lat) - assert 'coordinates' not in datasets['var1'].attrs + assert "lon" in datasets["var1"].coords + assert "lat" in datasets["var1"].coords + np.testing.assert_array_equal(datasets["var1"]["lon"].data, lon) + np.testing.assert_array_equal(datasets["var1"]["lat"].data, lat) + assert "coordinates" not in datasets["var1"].attrs # There should be no link if there was no 'coordinate' attribute - assert 'lon' not in datasets['var2'].coords - assert 'lat' not in datasets['var2'].coords + assert "lon" not in datasets["var2"].coords + assert "lat" not in datasets["var2"].coords # The non-existent dimension or coordinate should be dropped - 
assert 'time' not in datasets['var3'].coords - assert 'not_exist' not in datasets['var4'].coords + assert "time" not in datasets["var3"].coords + assert "not_exist" not in datasets["var4"].coords def test_make_alt_coords_unique(self): """Test that created coordinate variables are unique.""" @@ -778,87 +778,87 @@ def test_make_alt_coords_unique(self): x = [1, 2] time1 = [1, 2] time2 = [3, 4] - datasets = {'var1': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x, 'acq_time': ('y', time1)}), - 'var2': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x, 'acq_time': ('y', time2)})} + datasets = {"var1": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x, "acq_time": ("y", time1)}), + "var2": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x, "acq_time": ("y", time2)})} # Test that dataset names are prepended to alternative coordinates res = make_alt_coords_unique(datasets) - np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) - np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - assert 'acq_time' not in res['var1'].coords - assert 'acq_time' not in res['var2'].coords + np.testing.assert_array_equal(res["var1"]["var1_acq_time"], time1) + np.testing.assert_array_equal(res["var2"]["var2_acq_time"], time2) + assert "acq_time" not in res["var1"].coords + assert "acq_time" not in res["var2"].coords # Make sure nothing else is modified - np.testing.assert_array_equal(res['var1']['x'], x) - np.testing.assert_array_equal(res['var1']['y'], y) - np.testing.assert_array_equal(res['var2']['x'], x) - np.testing.assert_array_equal(res['var2']['y'], y) + np.testing.assert_array_equal(res["var1"]["x"], x) + np.testing.assert_array_equal(res["var1"]["y"], y) + np.testing.assert_array_equal(res["var2"]["x"], x) + np.testing.assert_array_equal(res["var2"]["y"], y) # Coords not unique -> Dataset names must be prepended, even if pretty=True with pytest.warns(UserWarning, match='Cannot pretty-format "acq_time"'): res = make_alt_coords_unique(datasets, pretty=True) - np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1) - np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2) - assert 'acq_time' not in res['var1'].coords - assert 'acq_time' not in res['var2'].coords + np.testing.assert_array_equal(res["var1"]["var1_acq_time"], time1) + np.testing.assert_array_equal(res["var2"]["var2_acq_time"], time2) + assert "acq_time" not in res["var1"].coords + assert "acq_time" not in res["var2"].coords # Coords unique and pretty=True -> Don't modify coordinate names - datasets['var2']['acq_time'] = ('y', time1) + datasets["var2"]["acq_time"] = ("y", time1) res = make_alt_coords_unique(datasets, pretty=True) - np.testing.assert_array_equal(res['var1']['acq_time'], time1) - np.testing.assert_array_equal(res['var2']['acq_time'], time1) - assert 'var1_acq_time' not in res['var1'].coords - assert 'var2_acq_time' not in res['var2'].coords + np.testing.assert_array_equal(res["var1"]["acq_time"], time1) + np.testing.assert_array_equal(res["var2"]["acq_time"], time1) + assert "var1_acq_time" not in res["var1"].coords + assert "var2_acq_time" not in res["var2"].coords def test_area2cf(self): """Test the conversion of an area to CF standards.""" from satpy.writers.cf_writer import area2cf - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, - attrs={'name': 'var1'}) + ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}, + 
attrs={"name": "var1"}) # a) Area Definition and strict=False geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) ds = ds_base.copy(deep=True) - ds.attrs['area'] = geos + ds.attrs["area"] = geos res = area2cf(ds, include_lonlats=False) assert len(res) == 2 assert res[0].size == 1 # grid mapping variable - assert res[0].name == res[1].attrs['grid_mapping'] + assert res[0].name == res[1].attrs["grid_mapping"] # b) Area Definition and include_lonlats=False ds = ds_base.copy(deep=True) - ds.attrs['area'] = geos + ds.attrs["area"] = geos res = area2cf(ds, include_lonlats=True) # same as above assert len(res) == 2 assert res[0].size == 1 # grid mapping variable - assert res[0].name == res[1].attrs['grid_mapping'] + assert res[0].name == res[1].attrs["grid_mapping"] # but now also have the lon/lats - assert 'longitude' in res[1].coords - assert 'latitude' in res[1].coords + assert "longitude" in res[1].coords + assert "latitude" in res[1].coords # c) Swath Definition swath = pyresample.geometry.SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]]) ds = ds_base.copy(deep=True) - ds.attrs['area'] = swath + ds.attrs["area"] = swath res = area2cf(ds, include_lonlats=False) assert len(res) == 1 - assert 'longitude' in res[0].coords - assert 'latitude' in res[0].coords - assert 'grid_mapping' not in res[0].attrs + assert "longitude" in res[0].coords + assert "latitude" in res[0].coords + assert "grid_mapping" not in res[0].attrs def test__add_grid_mapping(self): """Test the conversion from pyresample area object to CF grid mapping.""" @@ -873,175 +873,175 @@ def _gm_matches(gmapping, expected): else: np.testing.assert_almost_equal(test_val, attr_val, decimal=3) - ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]}, - attrs={'name': 'var1'}) + ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), coords={"y": [1, 2], "x": [3, 4]}, + attrs={"name": "var1"}) # a) Projection has a corresponding CF representation (e.g. geos) a = 6378169. b = 6356583.8 h = 35785831. 
geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': h, 'a': a, 'b': b, - 'lat_0': 0, 'lon_0': 0}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": h, "a": a, "b": b, + "lat_0": 0, "lon_0": 0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, - attrs={'perspective_point_height': h, - 'latitude_of_projection_origin': 0, - 'longitude_of_projection_origin': 0, - 'grid_mapping_name': 'geostationary', - 'semi_major_axis': a, - 'semi_minor_axis': b, + attrs={"perspective_point_height": h, + "latitude_of_projection_origin": 0, + "longitude_of_projection_origin": 0, + "grid_mapping_name": "geostationary", + "semi_major_axis": a, + "semi_minor_axis": b, # 'sweep_angle_axis': None, }) ds = ds_base.copy() - ds.attrs['area'] = geos + ds.attrs["area"] = geos new_ds, grid_mapping = _add_grid_mapping(ds) - if 'sweep_angle_axis' in grid_mapping.attrs: + if "sweep_angle_axis" in grid_mapping.attrs: # older versions of pyproj might not include this - assert grid_mapping.attrs['sweep_angle_axis'] == 'y' + assert grid_mapping.attrs["sweep_angle_axis"] == "y" - assert new_ds.attrs['grid_mapping'] == 'geos' + assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) # should not have been modified - assert 'grid_mapping' not in ds.attrs + assert "grid_mapping" not in ds.attrs # b) Projection does not have a corresponding CF representation (COSMO) cosmo7 = pyresample.geometry.AreaDefinition( - area_id='cosmo7', - description='cosmo7', - proj_id='cosmo7', - projection={'proj': 'ob_tran', 'ellps': 'WGS84', 'lat_0': 46, 'lon_0': 4.535, - 'o_proj': 'stere', 'o_lat_p': 90, 'o_lon_p': -5.465}, + area_id="cosmo7", + description="cosmo7", + proj_id="cosmo7", + projection={"proj": "ob_tran", "ellps": "WGS84", "lat_0": 46, "lon_0": 4.535, + "o_proj": "stere", "o_lat_p": 90, "o_lon_p": -5.465}, width=597, height=510, area_extent=[-1812933, -1003565, 814056, 1243448] ) ds = ds_base.copy() - ds.attrs['area'] = cosmo7 + ds.attrs["area"] = cosmo7 new_ds, grid_mapping = _add_grid_mapping(ds) - assert 'crs_wkt' in grid_mapping.attrs - wkt = grid_mapping.attrs['crs_wkt'] + assert "crs_wkt" in grid_mapping.attrs + wkt = grid_mapping.attrs["crs_wkt"] assert 'ELLIPSOID["WGS 84"' in wkt assert 'PARAMETER["lat_0",46' in wkt assert 'PARAMETER["lon_0",4.535' in wkt assert 'PARAMETER["o_lat_p",90' in wkt assert 'PARAMETER["o_lon_p",-5.465' in wkt - assert new_ds.attrs['grid_mapping'] == 'cosmo7' + assert new_ds.attrs["grid_mapping"] == "cosmo7" # c) Projection Transverse Mercator lat_0 = 36.5 lon_0 = 15.0 tmerc = pyresample.geometry.AreaDefinition( - area_id='tmerc', - description='tmerc', - proj_id='tmerc', - projection={'proj': 'tmerc', 'ellps': 'WGS84', 'lat_0': 36.5, 'lon_0': 15.0}, + area_id="tmerc", + description="tmerc", + proj_id="tmerc", + projection={"proj": "tmerc", "ellps": "WGS84", "lat_0": 36.5, "lon_0": 15.0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) tmerc_expected = xr.DataArray(data=0, - attrs={'latitude_of_projection_origin': lat_0, - 'longitude_of_central_meridian': lon_0, - 'grid_mapping_name': 'transverse_mercator', - 'reference_ellipsoid_name': 'WGS 84', - 'false_easting': 0., - 'false_northing': 0., + attrs={"latitude_of_projection_origin": lat_0, + "longitude_of_central_meridian": lon_0, + "grid_mapping_name": "transverse_mercator", + "reference_ellipsoid_name": "WGS 84", + "false_easting": 0., + "false_northing": 0., }) 
ds = ds_base.copy() - ds.attrs['area'] = tmerc + ds.attrs["area"] = tmerc new_ds, grid_mapping = _add_grid_mapping(ds) - assert new_ds.attrs['grid_mapping'] == 'tmerc' + assert new_ds.attrs["grid_mapping"] == "tmerc" _gm_matches(grid_mapping, tmerc_expected) # d) Projection that has a representation but no explicit a/b h = 35785831. geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', - 'lat_0': 0, 'lon_0': 0}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": h, "datum": "WGS84", "ellps": "GRS80", + "lat_0": 0, "lon_0": 0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, - attrs={'perspective_point_height': h, - 'latitude_of_projection_origin': 0, - 'longitude_of_projection_origin': 0, - 'grid_mapping_name': 'geostationary', + attrs={"perspective_point_height": h, + "latitude_of_projection_origin": 0, + "longitude_of_projection_origin": 0, + "grid_mapping_name": "geostationary", # 'semi_major_axis': 6378137.0, # 'semi_minor_axis': 6356752.314, # 'sweep_angle_axis': None, }) ds = ds_base.copy() - ds.attrs['area'] = geos + ds.attrs["area"] = geos new_ds, grid_mapping = _add_grid_mapping(ds) - assert new_ds.attrs['grid_mapping'] == 'geos' + assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) # e) oblique Mercator area = pyresample.geometry.AreaDefinition( - area_id='omerc_otf', - description='On-the-fly omerc area', - proj_id='omerc', - projection={'alpha': '9.02638777018478', 'ellps': 'WGS84', 'gamma': '0', 'k': '1', - 'lat_0': '-0.256794486098476', 'lonc': '13.7888658224205', - 'proj': 'omerc', 'units': 'm'}, + area_id="omerc_otf", + description="On-the-fly omerc area", + proj_id="omerc", + projection={"alpha": "9.02638777018478", "ellps": "WGS84", "gamma": "0", "k": "1", + "lat_0": "-0.256794486098476", "lonc": "13.7888658224205", + "proj": "omerc", "units": "m"}, width=2837, height=5940, area_extent=[-1460463.0893, 3455291.3877, 1538407.1158, 9615788.8787] ) - omerc_dict = {'azimuth_of_central_line': 9.02638777018478, - 'false_easting': 0., - 'false_northing': 0., + omerc_dict = {"azimuth_of_central_line": 9.02638777018478, + "false_easting": 0., + "false_northing": 0., # 'gamma': 0, # this is not CF compliant - 'grid_mapping_name': "oblique_mercator", - 'latitude_of_projection_origin': -0.256794486098476, - 'longitude_of_projection_origin': 13.7888658224205, + "grid_mapping_name": "oblique_mercator", + "latitude_of_projection_origin": -0.256794486098476, + "longitude_of_projection_origin": 13.7888658224205, # 'prime_meridian_name': "Greenwich", - 'reference_ellipsoid_name': "WGS 84"} + "reference_ellipsoid_name": "WGS 84"} omerc_expected = xr.DataArray(data=0, attrs=omerc_dict) ds = ds_base.copy() - ds.attrs['area'] = area + ds.attrs["area"] = area new_ds, grid_mapping = _add_grid_mapping(ds) - assert new_ds.attrs['grid_mapping'] == 'omerc_otf' + assert new_ds.attrs["grid_mapping"] == "omerc_otf" _gm_matches(grid_mapping, omerc_expected) # f) Projection that has a representation but no explicit a/b h = 35785831. 
geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': h, 'datum': 'WGS84', 'ellps': 'GRS80', - 'lat_0': 0, 'lon_0': 0}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": h, "datum": "WGS84", "ellps": "GRS80", + "lat_0": 0, "lon_0": 0}, width=2, height=2, area_extent=[-1, -1, 1, 1]) geos_expected = xr.DataArray(data=0, - attrs={'perspective_point_height': h, - 'latitude_of_projection_origin': 0, - 'longitude_of_projection_origin': 0, - 'grid_mapping_name': 'geostationary', - 'reference_ellipsoid_name': 'WGS 84', + attrs={"perspective_point_height": h, + "latitude_of_projection_origin": 0, + "longitude_of_projection_origin": 0, + "grid_mapping_name": "geostationary", + "reference_ellipsoid_name": "WGS 84", }) ds = ds_base.copy() - ds.attrs['area'] = geos + ds.attrs["area"] = geos new_ds, grid_mapping = _add_grid_mapping(ds) - assert new_ds.attrs['grid_mapping'] == 'geos' + assert new_ds.attrs["grid_mapping"] == "geos" _gm_matches(grid_mapping, geos_expected) def test_add_lonlat_coords(self): @@ -1049,50 +1049,50 @@ def test_add_lonlat_coords(self): from satpy.writers.cf_writer import add_lonlat_coords area = pyresample.geometry.AreaDefinition( - 'seviri', - 'Native SEVIRI grid', - 'geos', + "seviri", + "Native SEVIRI grid", + "geos", "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", 2, 2, [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] ) lons_ref, lats_ref = area.get_lonlats() - dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), attrs={'area': area}) + dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=("y", "x"), attrs={"area": area}) res = add_lonlat_coords(dataarray) # original should be unmodified - assert 'longitude' not in dataarray.coords - assert set(res.coords) == {'longitude', 'latitude'} - lat = res['latitude'] - lon = res['longitude'] + assert "longitude" not in dataarray.coords + assert set(res.coords) == {"longitude", "latitude"} + lat = res["latitude"] + lon = res["longitude"] np.testing.assert_array_equal(lat.data, lats_ref) np.testing.assert_array_equal(lon.data, lons_ref) - assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() - assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() + assert {"name": "latitude", "standard_name": "latitude", "units": "degrees_north"}.items() <= lat.attrs.items() + assert {"name": "longitude", "standard_name": "longitude", "units": "degrees_east"}.items() <= lon.attrs.items() area = pyresample.geometry.AreaDefinition( - 'seviri', - 'Native SEVIRI grid', - 'geos', + "seviri", + "Native SEVIRI grid", + "geos", "+a=6378169.0 +h=35785831.0 +b=6356583.8 +lon_0=0 +proj=geos", 10, 10, [-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662] ) lons_ref, lats_ref = area.get_lonlats() dataarray = xr.DataArray(data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)), - dims=('bands', 'y', 'x'), attrs={'area': area}) + dims=("bands", "y", "x"), attrs={"area": area}) res = add_lonlat_coords(dataarray) # original should be unmodified - assert 'longitude' not in dataarray.coords - assert set(res.coords) == {'longitude', 'latitude'} - lat = res['latitude'] - lon = res['longitude'] + assert "longitude" not in dataarray.coords + assert set(res.coords) == {"longitude", "latitude"} + lat = res["latitude"] + lon = 
res["longitude"] np.testing.assert_array_equal(lat.data, lats_ref) np.testing.assert_array_equal(lon.data, lons_ref) - assert {'name': 'latitude', 'standard_name': 'latitude', 'units': 'degrees_north'}.items() <= lat.attrs.items() - assert {'name': 'longitude', 'standard_name': 'longitude', 'units': 'degrees_east'}.items() <= lon.attrs.items() + assert {"name": "latitude", "standard_name": "latitude", "units": "degrees_north"}.items() <= lat.attrs.items() + assert {"name": "longitude", "standard_name": "longitude", "units": "degrees_east"}.items() <= lon.attrs.items() def test_load_module_with_old_pyproj(self): """Test that cf_writer can still be loaded with pyproj 1.9.6.""" @@ -1100,105 +1100,105 @@ def test_load_module_with_old_pyproj(self): import sys import pyproj # noqa 401 - old_version = sys.modules['pyproj'].__version__ - sys.modules['pyproj'].__version__ = "1.9.6" + old_version = sys.modules["pyproj"].__version__ + sys.modules["pyproj"].__version__ = "1.9.6" try: - importlib.reload(sys.modules['satpy.writers.cf_writer']) + importlib.reload(sys.modules["satpy.writers.cf_writer"]) finally: # Tear down - sys.modules['pyproj'].__version__ = old_version - importlib.reload(sys.modules['satpy.writers.cf_writer']) + sys.modules["pyproj"].__version__ = old_version + importlib.reload(sys.modules["satpy.writers.cf_writer"]) def test_global_attr_default_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) - scn['test-array'] = xr.DataArray([[1, 2, 3]], - dims=('y', 'x'), + scn["test-array"] = xr.DataArray([[1, 2, 3]], + dims=("y", "x"), attrs=dict(start_time=start_time, end_time=end_time, - prerequisites=[make_dsq(name='hej')])) + prerequisites=[make_dsq(name="hej")])) with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf') + scn.save_datasets(filename=filename, writer="cf") with xr.open_dataset(filename) as f: - assert f.attrs['Conventions'] == 'CF-1.7' - assert 'Created by pytroll/satpy on' in f.attrs['history'] + assert f.attrs["Conventions"] == "CF-1.7" + assert "Created by pytroll/satpy on" in f.attrs["history"] def test_global_attr_history_and_Conventions(self): """Test saving global attributes history and Conventions.""" scn = Scene() start_time = datetime(2018, 5, 30, 10, 0) end_time = datetime(2018, 5, 30, 10, 15) - scn['test-array'] = xr.DataArray([[1, 2, 3]], - dims=('y', 'x'), + scn["test-array"] = xr.DataArray([[1, 2, 3]], + dims=("y", "x"), attrs=dict(start_time=start_time, end_time=end_time, - prerequisites=[make_dsq(name='hej')])) + prerequisites=[make_dsq(name="hej")])) header_attrs = {} - header_attrs['history'] = ('TEST add history',) - header_attrs['Conventions'] = 'CF-1.7, ACDD-1.3' + header_attrs["history"] = ("TEST add history",) + header_attrs["Conventions"] = "CF-1.7, ACDD-1.3" with TempFile() as filename: - scn.save_datasets(filename=filename, writer='cf', header_attrs=header_attrs) + scn.save_datasets(filename=filename, writer="cf", header_attrs=header_attrs) with xr.open_dataset(filename) as f: - assert f.attrs['Conventions'] == 'CF-1.7, ACDD-1.3' - assert 'TEST add history\n' in f.attrs['history'] - assert 'Created by pytroll/satpy on' in f.attrs['history'] + assert f.attrs["Conventions"] == "CF-1.7, ACDD-1.3" + assert "TEST add history\n" in f.attrs["history"] + assert "Created by pytroll/satpy on" in f.attrs["history"] class TestCFWriterData: """Test case for CF writer where data arrays are needed.""" - 
@pytest.fixture + @pytest.fixture() def datasets(self): """Create test dataset.""" data = [[75, 2], [3, 4]] y = [1, 2] x = [1, 2] geos = pyresample.geometry.AreaDefinition( - area_id='geos', - description='geos', - proj_id='geos', - projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8}, + area_id="geos", + description="geos", + proj_id="geos", + projection={"proj": "geos", "h": 35785831., "a": 6378169., "b": 6356583.8}, width=2, height=2, area_extent=[-1, -1, 1, 1]) datasets = { - 'var1': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'var2': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lat': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x}), - 'lon': xr.DataArray(data=data, - dims=('y', 'x'), - coords={'y': y, 'x': x})} - datasets['lat'].attrs['standard_name'] = 'latitude' - datasets['var1'].attrs['standard_name'] = 'dummy' - datasets['var2'].attrs['standard_name'] = 'dummy' - datasets['var2'].attrs['area'] = geos - datasets['var1'].attrs['area'] = geos - datasets['lat'].attrs['name'] = 'lat' - datasets['var1'].attrs['name'] = 'var1' - datasets['var2'].attrs['name'] = 'var2' - datasets['lon'].attrs['name'] = 'lon' + "var1": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x}), + "var2": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x}), + "lat": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x}), + "lon": xr.DataArray(data=data, + dims=("y", "x"), + coords={"y": y, "x": x})} + datasets["lat"].attrs["standard_name"] = "latitude" + datasets["var1"].attrs["standard_name"] = "dummy" + datasets["var2"].attrs["standard_name"] = "dummy" + datasets["var2"].attrs["area"] = geos + datasets["var1"].attrs["area"] = geos + datasets["lat"].attrs["name"] = "lat" + datasets["var1"].attrs["name"] = "var1" + datasets["var2"].attrs["name"] = "var2" + datasets["lon"].attrs["name"] = "lon" return datasets def test_is_lon_or_lat_dataarray(self, datasets): """Test the is_lon_or_lat_dataarray function.""" from satpy.writers.cf_writer import is_lon_or_lat_dataarray - assert is_lon_or_lat_dataarray(datasets['lat']) - assert not is_lon_or_lat_dataarray(datasets['var1']) + assert is_lon_or_lat_dataarray(datasets["lat"]) + assert not is_lon_or_lat_dataarray(datasets["var1"]) def test_has_projection_coords(self, datasets): """Test the has_projection_coords function.""" from satpy.writers.cf_writer import has_projection_coords assert has_projection_coords(datasets) - datasets['lat'].attrs['standard_name'] = 'dummy' + datasets["lat"].attrs["standard_name"] = "dummy" assert not has_projection_coords(datasets) def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): @@ -1206,7 +1206,7 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): from satpy.writers.cf_writer import _collect_cf_dataset datasets_list = [datasets[key] for key in datasets.keys()] - datasets_list_no_latlon = [datasets[key] for key in ['var1', 'var2']] + datasets_list_no_latlon = [datasets[key] for key in ["var1", "var2"]] # Collect datasets ds = _collect_cf_dataset(datasets_list, include_lonlats=True) @@ -1214,36 +1214,36 @@ def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets): # Test results assert len(ds.keys()) == 5 - assert set(ds.keys()) == {'var1', 'var2', 'lon', 'lat', 'geos'} + assert set(ds.keys()) == {"var1", "var2", "lon", "lat", "geos"} with pytest.raises(KeyError): - ds['var1'].attrs["latitude"] + ds["var1"].attrs["latitude"] with 
pytest.raises(KeyError): - ds['var1'].attrs["longitude"] - assert ds2['var1']['latitude'].attrs['name'] == 'latitude' - assert ds2['var1']['longitude'].attrs['name'] == 'longitude' + ds["var1"].attrs["longitude"] + assert ds2["var1"]["latitude"].attrs["name"] == "latitude" + assert ds2["var1"]["longitude"].attrs["name"] == "longitude" class EncodingUpdateTest: """Test update of netCDF encoding.""" - @pytest.fixture + @pytest.fixture() def fake_ds(self): """Create fake data for testing.""" - ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]]), - 'bar': (('y', 'x'), [[3, 4], [5, 6]])}, - coords={'y': [1, 2], - 'x': [3, 4], - 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) + ds = xr.Dataset({"foo": (("y", "x"), [[1, 2], [3, 4]]), + "bar": (("y", "x"), [[3, 4], [5, 6]])}, + coords={"y": [1, 2], + "x": [3, 4], + "lon": (("y", "x"), [[7, 8], [9, 10]])}) return ds - @pytest.fixture + @pytest.fixture() def fake_ds_digit(self): """Create fake data for testing.""" - ds_digit = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1, 2], [3, 4]]), - 'CHANNEL_2': (('y', 'x'), [[3, 4], [5, 6]])}, - coords={'y': [1, 2], - 'x': [3, 4], - 'lon': (('y', 'x'), [[7, 8], [9, 10]])}) + ds_digit = xr.Dataset({"CHANNEL_1": (("y", "x"), [[1, 2], [3, 4]]), + "CHANNEL_2": (("y", "x"), [[3, 4], [5, 6]])}, + coords={"y": [1, 2], + "x": [3, 4], + "lon": (("y", "x"), [[7, 8], [9, 10]])}) return ds_digit def test_dataset_name_digit(self, fake_ds_digit): @@ -1252,18 +1252,18 @@ def test_dataset_name_digit(self, fake_ds_digit): # Dataset with name staring with digit ds_digit = fake_ds_digit - kwargs = {'encoding': {'1': {'dtype': 'float32'}, - '2': {'dtype': 'float32'}}, - 'other': 'kwargs'} - enc, other_kwargs = update_encoding(ds_digit, kwargs, numeric_name_prefix='CHANNEL_') + kwargs = {"encoding": {"1": {"dtype": "float32"}, + "2": {"dtype": "float32"}}, + "other": "kwargs"} + enc, other_kwargs = update_encoding(ds_digit, kwargs, numeric_name_prefix="CHANNEL_") expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'CHANNEL_1': {'dtype': 'float32'}, - 'CHANNEL_2': {'dtype': 'float32'} + "y": {"_FillValue": None}, + "x": {"_FillValue": None}, + "CHANNEL_1": {"dtype": "float32"}, + "CHANNEL_2": {"dtype": "float32"} } assert enc == expected_dict - assert other_kwargs == {'other': 'kwargs'} + assert other_kwargs == {"other": "kwargs"} def test_without_time(self, fake_ds): """Test data with no time dimension.""" @@ -1271,29 +1271,29 @@ def test_without_time(self, fake_ds): # Without time dimension ds = fake_ds.chunk(2) - kwargs = {'encoding': {'bar': {'chunksizes': (1, 1)}}, - 'other': 'kwargs'} + kwargs = {"encoding": {"bar": {"chunksizes": (1, 1)}}, + "other": "kwargs"} enc, other_kwargs = update_encoding(ds, kwargs) expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (2, 2)}, - 'bar': {'chunksizes': (1, 1)} + "y": {"_FillValue": None}, + "x": {"_FillValue": None}, + "lon": {"chunksizes": (2, 2)}, + "foo": {"chunksizes": (2, 2)}, + "bar": {"chunksizes": (1, 1)} } assert enc == expected_dict - assert other_kwargs == {'other': 'kwargs'} + assert other_kwargs == {"other": "kwargs"} # Chunksize may not exceed shape ds = fake_ds.chunk(8) - kwargs = {'encoding': {}, 'other': 'kwargs'} + kwargs = {"encoding": {}, "other": "kwargs"} enc, other_kwargs = update_encoding(ds, kwargs) expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (2, 2)}, - 'bar': {'chunksizes': 
(2, 2)} + "y": {"_FillValue": None}, + "x": {"_FillValue": None}, + "lon": {"chunksizes": (2, 2)}, + "foo": {"chunksizes": (2, 2)}, + "bar": {"chunksizes": (2, 2)} } assert enc == expected_dict @@ -1302,32 +1302,32 @@ def test_with_time(self, fake_ds): from satpy.writers.cf_writer import update_encoding # With time dimension - ds = fake_ds.chunk(8).expand_dims({'time': [datetime(2009, 7, 1, 12, 15)]}) - kwargs = {'encoding': {'bar': {'chunksizes': (1, 1, 1)}}, - 'other': 'kwargs'} + ds = fake_ds.chunk(8).expand_dims({"time": [datetime(2009, 7, 1, 12, 15)]}) + kwargs = {"encoding": {"bar": {"chunksizes": (1, 1, 1)}}, + "other": "kwargs"} enc, other_kwargs = update_encoding(ds, kwargs) expected_dict = { - 'y': {'_FillValue': None}, - 'x': {'_FillValue': None}, - 'lon': {'chunksizes': (2, 2)}, - 'foo': {'chunksizes': (1, 2, 2)}, - 'bar': {'chunksizes': (1, 1, 1)}, - 'time': {'_FillValue': None, - 'calendar': 'proleptic_gregorian', - 'units': 'days since 2009-07-01 12:15:00'}, - 'time_bnds': {'_FillValue': None, - 'calendar': 'proleptic_gregorian', - 'units': 'days since 2009-07-01 12:15:00'} + "y": {"_FillValue": None}, + "x": {"_FillValue": None}, + "lon": {"chunksizes": (2, 2)}, + "foo": {"chunksizes": (1, 2, 2)}, + "bar": {"chunksizes": (1, 1, 1)}, + "time": {"_FillValue": None, + "calendar": "proleptic_gregorian", + "units": "days since 2009-07-01 12:15:00"}, + "time_bnds": {"_FillValue": None, + "calendar": "proleptic_gregorian", + "units": "days since 2009-07-01 12:15:00"} } assert enc == expected_dict # User-defined encoding may not be altered - assert kwargs['encoding'] == {'bar': {'chunksizes': (1, 1, 1)}} + assert kwargs["encoding"] == {"bar": {"chunksizes": (1, 1, 1)}} class TestEncodingKwarg: """Test CF writer with 'encoding' keyword argument.""" - @pytest.fixture + @pytest.fixture() def scene(self): """Create a fake scene.""" scn = Scene() @@ -1335,7 +1335,7 @@ def scene(self): "start_time": datetime(2018, 5, 30, 10, 0), "end_time": datetime(2018, 5, 30, 10, 15) } - scn['test-array'] = xr.DataArray([1., 2, 3], attrs=attrs) + scn["test-array"] = xr.DataArray([1., 2, 3], attrs=attrs) return scn @pytest.fixture(params=[True, False]) @@ -1343,15 +1343,15 @@ def compression_on(self, request): """Get compression options.""" return request.param - @pytest.fixture + @pytest.fixture() def encoding(self, compression_on): """Get encoding.""" enc = { - 'test-array': { - 'dtype': 'int8', - 'scale_factor': 0.1, - 'add_offset': 0.0, - '_FillValue': 3, + "test-array": { + "dtype": "int8", + "scale_factor": 0.1, + "add_offset": 0.0, + "_FillValue": 3, } } if compression_on: @@ -1359,19 +1359,19 @@ def encoding(self, compression_on): enc["test-array"].update(comp_params) return enc - @pytest.fixture + @pytest.fixture() def filename(self, tmp_path): """Get output filename.""" return str(tmp_path / "test.nc") - @pytest.fixture + @pytest.fixture() def complevel_exp(self, compression_on): """Get expected compression level.""" if compression_on: return 7 return 0 - @pytest.fixture + @pytest.fixture() def expected(self, complevel_exp): """Get expectated file contents.""" return { @@ -1384,15 +1384,15 @@ def expected(self, complevel_exp): def test_encoding_kwarg(self, scene, encoding, filename, expected): """Test 'encoding' keyword argument.""" - scene.save_datasets(filename=filename, encoding=encoding, writer='cf') + scene.save_datasets(filename=filename, encoding=encoding, writer="cf") self._assert_encoding_as_expected(filename, expected) def _assert_encoding_as_expected(self, filename, expected): with 
xr.open_dataset(filename, mask_and_scale=False) as f: - np.testing.assert_array_equal(f['test-array'][:], expected["data"]) - assert f['test-array'].attrs['scale_factor'] == expected["scale_factor"] - assert f['test-array'].attrs['_FillValue'] == expected["fill_value"] - assert f['test-array'].dtype == expected["dtype"] + np.testing.assert_array_equal(f["test-array"][:], expected["data"]) + assert f["test-array"].attrs["scale_factor"] == expected["scale_factor"] + assert f["test-array"].attrs["_FillValue"] == expected["fill_value"] + assert f["test-array"].dtype == expected["dtype"] assert f["test-array"].encoding["complevel"] == expected["complevel"] def test_warning_if_backends_dont_match(self, scene, filename, monkeypatch): @@ -1419,7 +1419,7 @@ def test_no_warning_if_backends_match(self, scene, filename, monkeypatch): class TestEncodingAttribute(TestEncodingKwarg): """Test CF writer with 'encoding' dataset attribute.""" - @pytest.fixture + @pytest.fixture() def scene_with_encoding(self, scene, encoding): """Create scene with a dataset providing the 'encoding' attribute.""" scene["test-array"].encoding = encoding["test-array"] @@ -1427,7 +1427,7 @@ def scene_with_encoding(self, scene, encoding): def test_encoding_attribute(self, scene_with_encoding, filename, expected): """Test 'encoding' dataset attribute.""" - scene_with_encoding.save_datasets(filename=filename, writer='cf') + scene_with_encoding.save_datasets(filename=filename, writer="cf") self._assert_encoding_as_expected(filename, expected) diff --git a/satpy/tests/writer_tests/test_geotiff.py b/satpy/tests/writer_tests/test_geotiff.py index bb6afc0c21..74fcd43609 100644 --- a/satpy/tests/writer_tests/test_geotiff.py +++ b/satpy/tests/writer_tests/test_geotiff.py @@ -34,9 +34,9 @@ def _get_test_datasets_2d(): """Create a single 2D test dataset.""" ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), "units": "K"} ) return [ds1] @@ -56,10 +56,10 @@ def _get_test_datasets_3d(): """Create a single 3D test dataset.""" ds1 = xr.DataArray( da.zeros((3, 100, 200), chunks=50), - dims=('bands', 'y', 'x'), - coords={'bands': ['R', 'G', 'B']}, - attrs={'name': 'test', - 'start_time': datetime.utcnow()} + dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}, + attrs={"name": "test", + "start_time": datetime.utcnow()} ) return [ds1] @@ -103,7 +103,7 @@ def test_simple_delayed_write(self, tmp_path): assert isinstance(res[0][0], da.Array) da.store(res[0], res[1]) for target in res[1]: - if hasattr(target, 'close'): + if hasattr(target, "close"): target.close() def test_colormap_write(self, tmp_path): @@ -138,43 +138,43 @@ def test_dtype_for_enhance_false(self, tmp_path): from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() w = GeoTIFFWriter(base_dir=tmp_path, enhance=False) - with mock.patch('satpy.writers.XRImage.save') as save_method: + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) - assert save_method.call_args[1]['dtype'] == np.float64 + assert save_method.call_args[1]["dtype"] == np.float64 def test_dtype_for_enhance_false_and_given_dtype(self, tmp_path): """Test that dtype of dataset is used if enhance=False and dtype=uint8.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() w = GeoTIFFWriter(base_dir=tmp_path, 
enhance=False, dtype=np.uint8) - with mock.patch('satpy.writers.XRImage.save') as save_method: + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) - assert save_method.call_args[1]['dtype'] == np.uint8 + assert save_method.call_args[1]["dtype"] == np.uint8 def test_fill_value_from_config(self, tmp_path): """Test fill_value coming from the writer config.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() w = GeoTIFFWriter(base_dir=tmp_path) - w.info['fill_value'] = 128 - with mock.patch('satpy.writers.XRImage.save') as save_method: + w.info["fill_value"] = 128 + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) - assert save_method.call_args[1]['fill_value'] == 128 + assert save_method.call_args[1]["fill_value"] == 128 def test_tags(self, tmp_path): """Test tags being added.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() - w = GeoTIFFWriter(tags={'test1': 1}, base_dir=tmp_path) - w.info['fill_value'] = 128 - with mock.patch('satpy.writers.XRImage.save') as save_method: + w = GeoTIFFWriter(tags={"test1": 1}, base_dir=tmp_path) + w.info["fill_value"] = 128 + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None - w.save_datasets(datasets, tags={'test2': 2}, compute=False) - called_tags = save_method.call_args[1]['tags'] - assert called_tags == {'test1': 1, 'test2': 2} + w.save_datasets(datasets, tags={"test2": 2}, compute=False) + called_tags = save_method.call_args[1]["tags"] + assert called_tags == {"test1": 1, "test2": 2} @pytest.mark.parametrize( "input_func", @@ -195,11 +195,11 @@ def test_scale_offset(self, input_func, save_kwargs, tmp_path): """Test tags being added.""" from satpy.writers.geotiff import GeoTIFFWriter datasets = input_func() - w = GeoTIFFWriter(tags={'test1': 1}, base_dir=tmp_path) - w.info['fill_value'] = 128 - with mock.patch('satpy.writers.XRImage.save') as save_method: + w = GeoTIFFWriter(tags={"test1": 1}, base_dir=tmp_path) + w.info["fill_value"] = 128 + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None - w.save_datasets(datasets, tags={'test2': 2}, compute=False, **save_kwargs) + w.save_datasets(datasets, tags={"test2": 2}, compute=False, **save_kwargs) kwarg_name = "include_scale_offset_tags" if "include_scale_offset" in save_kwargs else "scale_offset_tags" kwarg_value = save_method.call_args[1].get(kwarg_name) assert kwarg_value is not None @@ -209,10 +209,10 @@ def test_tiled_value_from_config(self, tmp_path): from satpy.writers.geotiff import GeoTIFFWriter datasets = _get_test_datasets_2d() w = GeoTIFFWriter(base_dir=tmp_path) - with mock.patch('satpy.writers.XRImage.save') as save_method: + with mock.patch("satpy.writers.XRImage.save") as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) - assert save_method.call_args[1]['tiled'] + assert save_method.call_args[1]["tiled"] def test_float_write_with_unit_conversion(self, tmp_path): """Test that geotiffs can be written as floats and convert units.""" diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py index 6369cddc51..b4ff371dab 100644 --- a/satpy/tests/writer_tests/test_mitiff.py +++ b/satpy/tests/writer_tests/test_mitiff.py @@ -55,11 +55,11 @@ def _get_test_datasets(self): from pyresample.geometry import 
AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -67,48 +67,48 @@ def _get_test_datasets(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': '1', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': ['1'], - 'calibration': 'reflectance', - 'metadata_requirements': { - 'order': ['1'], - 'config': { - '1': {'alias': '1-VIS0.63', - 'calibration': 'reflectance', - 'min-val': '0', - 'max-val': '100'}, + dims=("y", "x"), + attrs={"name": "1", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": ["1"], + "calibration": "reflectance", + "metadata_requirements": { + "order": ["1"], + "config": { + "1": {"alias": "1-VIS0.63", + "calibration": "reflectance", + "min-val": "0", + "max-val": "100"}, }, - 'translate': {'1': '1', + "translate": {"1": "1", }, - 'file_pattern': '1_{start_time:%Y%m%d_%H%M%S}.mitiff' + "file_pattern": "1_{start_time:%Y%m%d_%H%M%S}.mitiff" }} ) ds2 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': '4', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': ['4'], - 'calibration': 'brightness_temperature', - 'metadata_requirements': { - 'order': ['4'], - 'config': { - '4': {'alias': '4-IR10.8', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, + dims=("y", "x"), + attrs={"name": "4", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": ["4"], + "calibration": "brightness_temperature", + "metadata_requirements": { + "order": ["4"], + "config": { + "4": {"alias": "4-IR10.8", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, }, - 'translate': {'4': '4', + "translate": {"4": "4", }, - 'file_pattern': '4_{start_time:%Y%m%d_%H%M%S}.mitiff'} + "file_pattern": "4_{start_time:%Y%m%d_%H%M%S}.mitiff"} } ) return [ds1, ds2] @@ -122,11 +122,11 @@ def _get_test_datasets_sensor_set(self): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -134,48 +134,48 @@ def _get_test_datasets_sensor_set(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': '1', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': {'TEST_SENSOR_NAME'}, - 'area': area_def, - 'prerequisites': ['1'], - 'calibration': 'reflectance', - 'metadata_requirements': { - 'order': ['1'], - 'config': { - '1': {'alias': '1-VIS0.63', - 'calibration': 'reflectance', - 'min-val': '0', - 'max-val': '100'}, + dims=("y", "x"), + attrs={"name": "1", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": {"TEST_SENSOR_NAME"}, + "area": area_def, + "prerequisites": ["1"], + "calibration": "reflectance", + "metadata_requirements": { + "order": ["1"], + "config": { + "1": {"alias": "1-VIS0.63", + "calibration": "reflectance", + "min-val": "0", + "max-val": "100"}, }, - 'translate': {'1': '1', + "translate": {"1": "1", }, - 'file_pattern': '1_{start_time:%Y%m%d_%H%M%S}.mitiff' + "file_pattern": "1_{start_time:%Y%m%d_%H%M%S}.mitiff" }} ) ds2 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': '4', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': {'TEST_SENSOR_NAME'}, - 'area': area_def, - 'prerequisites': ['4'], - 'calibration': 'brightness_temperature', - 'metadata_requirements': { - 'order': ['4'], - 'config': { - '4': {'alias': '4-IR10.8', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, + dims=("y", "x"), + attrs={"name": "4", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": {"TEST_SENSOR_NAME"}, + "area": area_def, + "prerequisites": ["4"], + "calibration": "brightness_temperature", + "metadata_requirements": { + "order": ["4"], + "config": { + "4": {"alias": "4-IR10.8", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, }, - 'translate': {'4': '4', + "translate": {"4": "4", }, - 'file_pattern': '4_{start_time:%Y%m%d_%H%M%S}.mitiff'} + "file_pattern": "4_{start_time:%Y%m%d_%H%M%S}.mitiff"} } ) return [ds1, ds2] @@ -189,11 +189,11 @@ def _get_test_dataset(self, bands=3): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -201,13 +201,13 @@ def _get_test_dataset(self, bands=3): ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), - dims=('bands', 'y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': ['1', '2', '3']} + dims=("bands", "y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": ["1", "2", "3"]} ) return ds1 @@ -220,11 +220,11 @@ def _get_test_one_dataset(self): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=geos +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. h=36000. +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=geos +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. h=36000. +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -232,13 +232,13 @@ def _get_test_one_dataset(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'avhrr', - 'area': area_def, - 'prerequisites': [10.8]} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "avhrr", + "area": area_def, + "prerequisites": [10.8]} ) return ds1 @@ -251,11 +251,11 @@ def _get_test_one_dataset_sensor_set(self): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=geos +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. h=36000. +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=geos +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. h=36000. +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -263,13 +263,13 @@ def _get_test_one_dataset_sensor_set(self): ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': {'avhrr'}, - 'area': area_def, - 'prerequisites': [10.8]} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": {"avhrr"}, + "area": area_def, + "prerequisites": [10.8]} ) return ds1 @@ -281,11 +281,11 @@ def _get_test_dataset_with_bad_values(self, bands=3): from pyresample.geometry import AreaDefinition from pyresample.utils import proj4_str_to_dict area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -298,13 +298,13 @@ def _get_test_dataset_with_bad_values(self, bands=3): rgb_data = np.stack([data, data, data]) ds1 = xr.DataArray(rgb_data, - dims=('bands', 'y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': ['1', '2', '3']}) + dims=("bands", "y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": ["1", "2", "3"]}) return ds1 def _get_test_dataset_calibration(self, bands=6): @@ -319,93 +319,93 @@ def _get_test_dataset_calibration(self, bands=6): from satpy.scene import Scene from satpy.tests.utils import make_dsq area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) prereqs = [ - make_dsq(name='1', calibration='reflectance'), - make_dsq(name='2', calibration='reflectance'), - make_dsq(name='3', calibration='brightness_temperature'), - make_dsq(name='4', calibration='brightness_temperature'), - make_dsq(name='5', calibration='brightness_temperature'), - make_dsq(name='6', calibration='reflectance') + make_dsq(name="1", calibration="reflectance"), + make_dsq(name="2", calibration="reflectance"), + make_dsq(name="3", calibration="brightness_temperature"), + make_dsq(name="4", calibration="brightness_temperature"), + make_dsq(name="5", calibration="brightness_temperature"), + make_dsq(name="6", calibration="reflectance") ] scene = Scene() scene["1"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'reflectance'}) + dims=("y", "x"), + attrs={"calibration": "reflectance"}) scene["2"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'reflectance'}) + dims=("y", "x"), + attrs={"calibration": "reflectance"}) scene["3"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'brightness_temperature'}) + dims=("y", "x"), + attrs={"calibration": "brightness_temperature"}) scene["4"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'brightness_temperature'}) + dims=("y", "x"), + attrs={"calibration": "brightness_temperature"}) scene["5"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'brightness_temperature'}) + dims=("y", "x"), + attrs={"calibration": "brightness_temperature"}) scene["6"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'reflectance'}) + dims=("y", "x"), + attrs={"calibration": "reflectance"}) - data = xr.concat(scene, 'bands', coords='minimal') + data = xr.concat(scene, "bands", coords="minimal") bands = [] calibration = [] for p in scene: - calibration.append(p.attrs['calibration']) - bands.append(p.attrs['name']) - data['bands'] = list(bands) - new_attrs = {'name': 'datasets', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'test-sensor', - 'area': area_def, - 'prerequisites': prereqs, - 'metadata_requirements': { - 'order': ['1', '2', '3', '4', '5', '6'], - 'config': { - '1': 
{'alias': '1-VIS0.63', - 'calibration': 'reflectance', - 'min-val': '0', - 'max-val': '100'}, - '2': {'alias': '2-VIS0.86', - 'calibration': 'reflectance', - 'min-val': '0', - 'max-val': '100'}, - '3': {'alias': '3(3B)-IR3.7', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, - '4': {'alias': '4-IR10.8', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, - '5': {'alias': '5-IR11.5', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, - '6': {'alias': '6(3A)-VIS1.6', - 'calibration': 'reflectance', - 'min-val': '0', - 'max-val': '100'} + calibration.append(p.attrs["calibration"]) + bands.append(p.attrs["name"]) + data["bands"] = list(bands) + new_attrs = {"name": "datasets", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "test-sensor", + "area": area_def, + "prerequisites": prereqs, + "metadata_requirements": { + "order": ["1", "2", "3", "4", "5", "6"], + "config": { + "1": {"alias": "1-VIS0.63", + "calibration": "reflectance", + "min-val": "0", + "max-val": "100"}, + "2": {"alias": "2-VIS0.86", + "calibration": "reflectance", + "min-val": "0", + "max-val": "100"}, + "3": {"alias": "3(3B)-IR3.7", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, + "4": {"alias": "4-IR10.8", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, + "5": {"alias": "5-IR11.5", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, + "6": {"alias": "6(3A)-VIS1.6", + "calibration": "reflectance", + "min-val": "0", + "max-val": "100"} }, - 'translate': {'1': '1', - '2': '2', - '3': '3', - '4': '4', - '5': '5', - '6': '6' + "translate": {"1": "1", + "2": "2", + "3": "3", + "4": "4", + "5": "5", + "6": "6" }, - 'file_pattern': 'test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff' + "file_pattern": "test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff" } } ds1 = xr.DataArray(data=data.data, attrs=new_attrs, @@ -424,43 +424,43 @@ def _get_test_dataset_calibration_one_dataset(self, bands=1): from satpy.scene import Scene from satpy.tests.utils import make_dsq area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), ) - prereqs = [make_dsq(name='4', calibration='brightness_temperature')] + prereqs = [make_dsq(name="4", calibration="brightness_temperature")] scene = Scene() scene["4"] = xr.DataArray(da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'calibration': 'brightness_temperature'}) + dims=("y", "x"), + attrs={"calibration": "brightness_temperature"}) - data = scene['4'] + data = scene["4"] calibration = [] for p in scene: - calibration.append(p.attrs['calibration']) - new_attrs = {'name': 'datasets', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'test-sensor', - 'area': area_def, - 'prerequisites': prereqs, - 'metadata_requirements': { - 'order': ['4'], - 'config': { - '4': {'alias': 'BT', - 'calibration': 'brightness_temperature', - 'min-val': '-150', - 'max-val': '50'}, + calibration.append(p.attrs["calibration"]) + new_attrs = {"name": "datasets", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "test-sensor", + "area": area_def, + "prerequisites": prereqs, + "metadata_requirements": { + "order": ["4"], + "config": { + "4": {"alias": "BT", + "calibration": "brightness_temperature", + "min-val": "-150", + "max-val": "50"}, }, - 'translate': {'4': '4', + "translate": {"4": "4", }, - 'file_pattern': 'test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff' + "file_pattern": "test-dataset-{start_time:%Y%m%d%H%M%S}.mitiff" } } ds1 = xr.DataArray(data=data.data, attrs=new_attrs, @@ -478,11 +478,11 @@ def _get_test_dataset_three_bands_two_prereq(self, bands=3): from satpy.tests.utils import make_dsq area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. +lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -490,15 +490,15 @@ def _get_test_dataset_three_bands_two_prereq(self, bands=3): ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), - coords=[['R', 'G', 'B'], list(range(100)), list(range(200))], - dims=('bands', 'y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': [make_dsq(name='1', calibration='reflectance'), - make_dsq(name='2', calibration='reflectance')]} + coords=[["R", "G", "B"], list(range(100)), list(range(200))], + dims=("bands", "y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": [make_dsq(name="1", calibration="reflectance"), + make_dsq(name="2", calibration="reflectance")]} ) return ds1 @@ -513,11 +513,11 @@ def _get_test_dataset_three_bands_prereq(self, bands=3): from satpy.tests.utils import make_dsq area_def = AreaDefinition( - 'test', - 'test', - 'test', - proj4_str_to_dict('+proj=stere +datum=WGS84 +ellps=WGS84 ' - '+lon_0=0. +lat_0=90 +lat_ts=60 +units=km'), + "test", + "test", + "test", + proj4_str_to_dict("+proj=stere +datum=WGS84 +ellps=WGS84 " + "+lon_0=0. 
+lat_0=90 +lat_ts=60 +units=km"), 100, 200, (-1000., -1500., 1000., 1500.), @@ -525,15 +525,15 @@ def _get_test_dataset_three_bands_prereq(self, bands=3): ds1 = xr.DataArray( da.zeros((bands, 100, 200), chunks=50), - coords=[['R', 'G', 'B'], list(range(100)), list(range(200))], - dims=('bands', 'y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow(), - 'platform_name': "TEST_PLATFORM_NAME", - 'sensor': 'TEST_SENSOR_NAME', - 'area': area_def, - 'prerequisites': [make_dsq(wavelength=0.6, modifiers=('sunz_corrected',)), - make_dsq(wavelength=0.8, modifiers=('sunz_corrected',)), + coords=[["R", "G", "B"], list(range(100)), list(range(200))], + dims=("bands", "y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow(), + "platform_name": "TEST_PLATFORM_NAME", + "sensor": "TEST_SENSOR_NAME", + "area": area_def, + "prerequisites": [make_dsq(wavelength=0.6, modifiers=("sunz_corrected",)), + make_dsq(wavelength=0.8, modifiers=("sunz_corrected",)), 10.8]}) return ds1 @@ -547,7 +547,7 @@ def _read_back_mitiff_and_check(self, filename, expected, test_shape=(100, 200)) def _imagedescription_from_mitiff(self, filename): pillow_tif = Image.open(filename) IMAGEDESCRIPTION = 270 - imgdesc = (pillow_tif.tag_v2.get(IMAGEDESCRIPTION)).split('\n') + imgdesc = (pillow_tif.tag_v2.get(IMAGEDESCRIPTION)).split("\n") return imgdesc def test_init(self): @@ -569,8 +569,8 @@ def test_save_datasets(self): dataset = self._get_test_datasets() w = MITIFFWriter(base_dir=self.base_dir) w.save_datasets(dataset) - filename = (dataset[0].attrs['metadata_requirements']['file_pattern']).format( - start_time=dataset[0].attrs['start_time']) + filename = (dataset[0].attrs["metadata_requirements"]["file_pattern"]).format( + start_time=dataset[0].attrs["start_time"]) self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_datasets_sensor_set(self): @@ -580,8 +580,8 @@ def test_save_datasets_sensor_set(self): dataset = self._get_test_datasets_sensor_set() w = MITIFFWriter(base_dir=self.base_dir) w.save_datasets(dataset) - filename = (dataset[0].attrs['metadata_requirements']['file_pattern']).format( - start_time=dataset[0].attrs['start_time']) + filename = (dataset[0].attrs["metadata_requirements"]["file_pattern"]).format( + start_time=dataset[0].attrs["start_time"]) self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_one_dataset(self): @@ -592,8 +592,8 @@ def test_save_one_dataset(self): w.save_dataset(dataset) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, os.listdir(self.base_dir)[0])) for key in imgdesc: - if 'In this file' in key: - self.assertEqual(key, ' Channels: 1 In this file: 1') + if "In this file" in key: + assert key == " Channels: 1 In this file: 1" def test_save_one_dataset_sensor_set(self): """Test basic writer operation with one dataset ie. 
no bands.""" @@ -603,8 +603,8 @@ def test_save_one_dataset_sensor_set(self): w.save_dataset(dataset) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, os.listdir(self.base_dir)[0])) for key in imgdesc: - if 'In this file' in key: - self.assertEqual(key, ' Channels: 1 In this file: 1') + if "In this file" in key: + assert key == " Channels: 1 In this file: 1" def test_save_dataset_with_calibration(self): """Test writer operation with calibration.""" @@ -613,167 +613,167 @@ def test_save_dataset_with_calibration(self): expected_ir = np.full((100, 200), 255) expected_vis = np.full((100, 200), 0) expected = np.stack([expected_vis, expected_vis, expected_ir, expected_ir, expected_ir, expected_vis]) - expected_key_channel = ['Table_calibration: 1-VIS0.63, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 ' - '1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 ' - '8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 ' - '14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 ' - '20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 ' - '25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 ' - '30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 ' - '36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 ' - '41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 ' - '47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 ' - '52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 58.04 ' - '58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 ' - '63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 69.02 ' - '69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 ' - '74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 ' - '80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 ' - '85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 ' - '91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 ' - '96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]', - 'Table_calibration: 2-VIS0.86, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 ' - '1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 ' - '8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 ' - '14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 ' - '20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 ' - '25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 ' - '30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 ' - '36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 ' - '41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 ' - '47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 ' - '52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 58.04 ' - '58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 ' - '63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 
67.84 68.24 68.63 69.02 ' - '69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 ' - '74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 ' - '80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 ' - '85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 ' - '91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 ' - '96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]', - u'Table_calibration: 3(3B)-IR3.7, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 ' - '45.29 44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 ' - '34.31 33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 ' - '23.33 22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 ' - '12.35 11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 ' - '-0.20 -0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 ' - '-11.18 -11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 ' - '-20.59 -21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 ' - '-30.00 -30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 ' - '-39.41 -40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 ' - '-48.82 -49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 ' - '-58.24 -59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 ' - '-67.65 -68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 ' - '-77.06 -77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 ' - '-86.47 -87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 ' - '-95.88 -96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 ' - '-104.51 -105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 ' - '-112.35 -113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 ' - '-120.20 -120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 ' - '-128.04 -128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 ' - '-135.88 -136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 ' - '-143.73 -144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', - u'Table_calibration: 4-IR10.8, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 ' - '45.29 ' - '44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 ' - '33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 ' - '22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 ' - '11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 ' - '-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 ' - '-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 ' - '-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 ' - '-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 ' - '-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 ' - '-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 ' - '-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 ' - '-68.43 -69.22 
-70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 ' - '-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 ' - '-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 ' - '-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 ' - '-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 ' - '-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 ' - '-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 ' - '-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 ' - '-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 ' - '-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', - u'Table_calibration: 5-IR11.5, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 ' - '45.29 ' - '44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 ' - '33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 ' - '22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 ' - '11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 ' - '-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 ' - '-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 ' - '-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 ' - '-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 ' - '-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 ' - '-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 ' - '-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 ' - '-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 ' - '-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 ' - '-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 ' - '-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 ' - '-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 ' - '-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 ' - '-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 ' - '-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 ' - '-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 ' - '-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', - 'Table_calibration: 6(3A)-VIS1.6, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 ' - '1.57 1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 ' - '8.24 8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 ' - '14.12 14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 ' - '19.61 20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 ' - '25.10 25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 ' - '30.59 30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 ' - '36.08 36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 ' - '41.57 41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 
46.27 46.67 ' - '47.06 47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 ' - '52.55 52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 ' - '58.04 58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 ' - '63.53 63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 ' - '69.02 69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 ' - '74.51 74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 ' - '80.00 80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 ' - '85.49 85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 ' - '90.98 91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 ' - '96.47 96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]'] + expected_key_channel = ["Table_calibration: 1-VIS0.63, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 " + "1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 " + "8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 " + "14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 " + "20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 " + "25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 " + "30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 " + "36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 " + "41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 " + "47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 " + "52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 58.04 " + "58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 " + "63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 69.02 " + "69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 " + "74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 " + "80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 " + "85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 " + "91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 " + "96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]", + "Table_calibration: 2-VIS0.86, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 1.57 " + "1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 8.24 " + "8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 14.12 " + "14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 19.61 " + "20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 25.10 " + "25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 30.59 " + "30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 36.08 " + "36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 41.57 " + "41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 47.06 " + "47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 52.55 " + "52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 
58.04 " + "58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 63.53 " + "63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 69.02 " + "69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 74.51 " + "74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 80.00 " + "80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 85.49 " + "85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 90.98 " + "91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 96.47 " + "96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]", + u"Table_calibration: 3(3B)-IR3.7, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 " + "45.29 44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 " + "34.31 33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 " + "23.33 22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 " + "12.35 11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 " + "-0.20 -0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 " + "-11.18 -11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 " + "-20.59 -21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 " + "-30.00 -30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 " + "-39.41 -40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 " + "-48.82 -49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 " + "-58.24 -59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 " + "-67.65 -68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 " + "-77.06 -77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 " + "-86.47 -87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 " + "-95.88 -96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 " + "-104.51 -105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 " + "-112.35 -113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 " + "-120.20 -120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 " + "-128.04 -128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 " + "-135.88 -136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 " + "-143.73 -144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", + u"Table_calibration: 4-IR10.8, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 " + "45.29 " + "44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 " + "33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 " + "22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 " + "11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 " + "-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 " + "-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 " + "-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 " + "-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 " + "-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 " + "-49.61 -50.39 -51.18 -51.96 
-52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 " + "-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 " + "-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 " + "-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 " + "-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 " + "-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 " + "-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 " + "-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 " + "-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 " + "-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 " + "-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 " + "-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", + u"Table_calibration: 5-IR11.5, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 " + "45.29 " + "44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 " + "33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 " + "22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 " + "11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 " + "-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 " + "-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 " + "-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 " + "-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 " + "-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 " + "-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 " + "-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 " + "-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 " + "-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 " + "-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 " + "-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 " + "-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 " + "-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 " + "-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 " + "-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 " + "-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 " + "-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", + "Table_calibration: 6(3A)-VIS1.6, Reflectance(Albedo), [%], 8, [ 0.00 0.39 0.78 1.18 " + "1.57 1.96 2.35 2.75 3.14 3.53 3.92 4.31 4.71 5.10 5.49 5.88 6.27 6.67 7.06 7.45 7.84 " + "8.24 8.63 9.02 9.41 9.80 10.20 10.59 10.98 11.37 11.76 12.16 12.55 12.94 13.33 13.73 " + "14.12 14.51 14.90 15.29 15.69 16.08 16.47 16.86 17.25 17.65 18.04 18.43 18.82 19.22 " + "19.61 20.00 20.39 20.78 21.18 21.57 21.96 22.35 22.75 23.14 23.53 23.92 24.31 24.71 " + "25.10 25.49 25.88 26.27 26.67 27.06 27.45 27.84 28.24 28.63 29.02 29.41 29.80 30.20 " + "30.59 30.98 31.37 31.76 32.16 32.55 32.94 33.33 33.73 34.12 34.51 34.90 35.29 35.69 " + 
"36.08 36.47 36.86 37.25 37.65 38.04 38.43 38.82 39.22 39.61 40.00 40.39 40.78 41.18 " + "41.57 41.96 42.35 42.75 43.14 43.53 43.92 44.31 44.71 45.10 45.49 45.88 46.27 46.67 " + "47.06 47.45 47.84 48.24 48.63 49.02 49.41 49.80 50.20 50.59 50.98 51.37 51.76 52.16 " + "52.55 52.94 53.33 53.73 54.12 54.51 54.90 55.29 55.69 56.08 56.47 56.86 57.25 57.65 " + "58.04 58.43 58.82 59.22 59.61 60.00 60.39 60.78 61.18 61.57 61.96 62.35 62.75 63.14 " + "63.53 63.92 64.31 64.71 65.10 65.49 65.88 66.27 66.67 67.06 67.45 67.84 68.24 68.63 " + "69.02 69.41 69.80 70.20 70.59 70.98 71.37 71.76 72.16 72.55 72.94 73.33 73.73 74.12 " + "74.51 74.90 75.29 75.69 76.08 76.47 76.86 77.25 77.65 78.04 78.43 78.82 79.22 79.61 " + "80.00 80.39 80.78 81.18 81.57 81.96 82.35 82.75 83.14 83.53 83.92 84.31 84.71 85.10 " + "85.49 85.88 86.27 86.67 87.06 87.45 87.84 88.24 88.63 89.02 89.41 89.80 90.20 90.59 " + "90.98 91.37 91.76 92.16 92.55 92.94 93.33 93.73 94.12 94.51 94.90 95.29 95.69 96.08 " + "96.47 96.86 97.25 97.65 98.04 98.43 98.82 99.22 99.61 100.00 ]"] dataset = self._get_test_dataset_calibration() - w = MITIFFWriter(filename=dataset.attrs['metadata_requirements']['file_pattern'], base_dir=self.base_dir) + w = MITIFFWriter(filename=dataset.attrs["metadata_requirements"]["file_pattern"], base_dir=self.base_dir) w.save_dataset(dataset) - filename = (dataset.attrs['metadata_requirements']['file_pattern']).format( - start_time=dataset.attrs['start_time']) + filename = (dataset.attrs["metadata_requirements"]["file_pattern"]).format( + start_time=dataset.attrs["start_time"]) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) found_table_calibration = False number_of_calibrations = 0 for key in imgdesc: - if 'Table_calibration' in key: + if "Table_calibration" in key: found_table_calibration = True - if '1-VIS0.63' in key: - self.assertEqual(key, expected_key_channel[0]) + if "1-VIS0.63" in key: + assert key == expected_key_channel[0] number_of_calibrations += 1 - elif '2-VIS0.86' in key: - self.assertEqual(key, expected_key_channel[1]) + elif "2-VIS0.86" in key: + assert key == expected_key_channel[1] number_of_calibrations += 1 - elif '3(3B)-IR3.7' in key: - self.assertEqual(key, expected_key_channel[2]) + elif "3(3B)-IR3.7" in key: + assert key == expected_key_channel[2] number_of_calibrations += 1 - elif '4-IR10.8' in key: - self.assertEqual(key, expected_key_channel[3]) + elif "4-IR10.8" in key: + assert key == expected_key_channel[3] number_of_calibrations += 1 - elif '5-IR11.5' in key: - self.assertEqual(key, expected_key_channel[4]) + elif "5-IR11.5" in key: + assert key == expected_key_channel[4] number_of_calibrations += 1 - elif '6(3A)-VIS1.6' in key: - self.assertEqual(key, expected_key_channel[5]) + elif "6(3A)-VIS1.6" in key: + assert key == expected_key_channel[5] number_of_calibrations += 1 else: self.fail("Not a valid channel description i the given key.") - self.assertTrue(found_table_calibration, "Table_calibration is not found in the imagedescription.") - self.assertEqual(number_of_calibrations, 6) + assert found_table_calibration, "Table_calibration is not found in the imagedescription." 
+ assert number_of_calibrations == 6 pillow_tif = Image.open(os.path.join(self.base_dir, filename)) - self.assertEqual(pillow_tif.n_frames, 6) + assert pillow_tif.n_frames == 6 self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_dataset_with_calibration_one_dataset(self): @@ -781,46 +781,46 @@ def test_save_dataset_with_calibration_one_dataset(self): from satpy.writers.mitiff import MITIFFWriter expected = [np.full((100, 200), 255)] - expected_key_channel = [u'Table_calibration: BT, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 45.29 ' - '44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 ' - '33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 ' - '22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 ' - '11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 ' - '-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 ' - '-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 ' - '-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 ' - '-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 ' - '-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 ' - '-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 ' - '-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 ' - '-68.43 -69.22 -70.00 -70.78 -71.57 -72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 ' - '-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 ' - '-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 ' - '-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 ' - '-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 ' - '-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 ' - '-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 ' - '-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 ' - '-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 ' - '-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]', ] + expected_key_channel = [u"Table_calibration: BT, BT, °[C], 8, [ 50.00 49.22 48.43 47.65 46.86 46.08 45.29 " + "44.51 43.73 42.94 42.16 41.37 40.59 39.80 39.02 38.24 37.45 36.67 35.88 35.10 34.31 " + "33.53 32.75 31.96 31.18 30.39 29.61 28.82 28.04 27.25 26.47 25.69 24.90 24.12 23.33 " + "22.55 21.76 20.98 20.20 19.41 18.63 17.84 17.06 16.27 15.49 14.71 13.92 13.14 12.35 " + "11.57 10.78 10.00 9.22 8.43 7.65 6.86 6.08 5.29 4.51 3.73 2.94 2.16 1.37 0.59 -0.20 " + "-0.98 -1.76 -2.55 -3.33 -4.12 -4.90 -5.69 -6.47 -7.25 -8.04 -8.82 -9.61 -10.39 -11.18 " + "-11.96 -12.75 -13.53 -14.31 -15.10 -15.88 -16.67 -17.45 -18.24 -19.02 -19.80 -20.59 " + "-21.37 -22.16 -22.94 -23.73 -24.51 -25.29 -26.08 -26.86 -27.65 -28.43 -29.22 -30.00 " + "-30.78 -31.57 -32.35 -33.14 -33.92 -34.71 -35.49 -36.27 -37.06 -37.84 -38.63 -39.41 " + "-40.20 -40.98 -41.76 -42.55 -43.33 -44.12 -44.90 -45.69 -46.47 -47.25 -48.04 -48.82 " + "-49.61 -50.39 -51.18 -51.96 -52.75 -53.53 -54.31 -55.10 -55.88 -56.67 -57.45 -58.24 " + "-59.02 -59.80 -60.59 -61.37 -62.16 -62.94 -63.73 -64.51 -65.29 -66.08 -66.86 -67.65 " + "-68.43 -69.22 -70.00 -70.78 -71.57 
-72.35 -73.14 -73.92 -74.71 -75.49 -76.27 -77.06 " + "-77.84 -78.63 -79.41 -80.20 -80.98 -81.76 -82.55 -83.33 -84.12 -84.90 -85.69 -86.47 " + "-87.25 -88.04 -88.82 -89.61 -90.39 -91.18 -91.96 -92.75 -93.53 -94.31 -95.10 -95.88 " + "-96.67 -97.45 -98.24 -99.02 -99.80 -100.59 -101.37 -102.16 -102.94 -103.73 -104.51 " + "-105.29 -106.08 -106.86 -107.65 -108.43 -109.22 -110.00 -110.78 -111.57 -112.35 " + "-113.14 -113.92 -114.71 -115.49 -116.27 -117.06 -117.84 -118.63 -119.41 -120.20 " + "-120.98 -121.76 -122.55 -123.33 -124.12 -124.90 -125.69 -126.47 -127.25 -128.04 " + "-128.82 -129.61 -130.39 -131.18 -131.96 -132.75 -133.53 -134.31 -135.10 -135.88 " + "-136.67 -137.45 -138.24 -139.02 -139.80 -140.59 -141.37 -142.16 -142.94 -143.73 " + "-144.51 -145.29 -146.08 -146.86 -147.65 -148.43 -149.22 -150.00 ]", ] dataset = self._get_test_dataset_calibration_one_dataset() - w = MITIFFWriter(filename=dataset.attrs['metadata_requirements']['file_pattern'], base_dir=self.base_dir) + w = MITIFFWriter(filename=dataset.attrs["metadata_requirements"]["file_pattern"], base_dir=self.base_dir) w.save_dataset(dataset) - filename = (dataset.attrs['metadata_requirements']['file_pattern']).format( - start_time=dataset.attrs['start_time']) + filename = (dataset.attrs["metadata_requirements"]["file_pattern"]).format( + start_time=dataset.attrs["start_time"]) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) found_table_calibration = False number_of_calibrations = 0 for key in imgdesc: - if 'Table_calibration' in key: + if "Table_calibration" in key: found_table_calibration = True - if 'BT' in key: - self.assertEqual(key, expected_key_channel[0]) + if "BT" in key: + assert key == expected_key_channel[0] number_of_calibrations += 1 - self.assertTrue(found_table_calibration, "Expected table_calibration is not found in the imagedescription.") - self.assertEqual(number_of_calibrations, 1) + assert found_table_calibration, "Expected table_calibration is not found in the imagedescription." 
+ assert number_of_calibrations == 1 self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_save_dataset_with_bad_value(self): @@ -833,8 +833,8 @@ def test_save_dataset_with_bad_value(self): dataset = self._get_test_dataset_with_bad_values() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) - filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], - dataset.attrs['start_time']) + filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], + dataset.attrs["start_time"]) self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected, test_shape=(2, 5)) def test_convert_proj4_string(self): @@ -844,32 +844,32 @@ def test_convert_proj4_string(self): from pyresample.geometry import AreaDefinition from satpy.writers.mitiff import MITIFFWriter - checks = [{'epsg': '+init=EPSG:32631', - 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 ' - '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' - '+y_0=1515.000000\n')}, - {'epsg': '+init=EPSG:32632', - 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 ' - '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' - '+y_0=1515.000000\n')}, - {'epsg': '+init=EPSG:32633', - 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 ' - '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' - '+y_0=1515.000000\n')}, - {'epsg': '+init=EPSG:32634', - 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 ' - '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' - '+y_0=1515.000000\n')}, - {'epsg': '+init=EPSG:32635', - 'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 ' - '+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 ' - '+y_0=1515.000000\n')}] + checks = [{"epsg": "+init=EPSG:32631", + "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 " + "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " + "+y_0=1515.000000\n")}, + {"epsg": "+init=EPSG:32632", + "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 " + "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " + "+y_0=1515.000000\n")}, + {"epsg": "+init=EPSG:32633", + "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=15 +k=0.9996 " + "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " + "+y_0=1515.000000\n")}, + {"epsg": "+init=EPSG:32634", + "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 " + "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " + "+y_0=1515.000000\n")}, + {"epsg": "+init=EPSG:32635", + "proj4": (" Proj string: +proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 " + "+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 " + "+y_0=1515.000000\n")}] for check in checks: area_def = AreaDefinition( - 'test', - 'test', - 'test', - check['epsg'], + "test", + "test", + "test", + check["epsg"], 100, 200, (-1000., -1500., 1000., 1500.), @@ -877,13 +877,13 @@ def test_convert_proj4_string(self): ds1 = xr.DataArray( da.zeros((10, 20), chunks=20), - dims=('y', 'x'), - attrs={'area': area_def} + dims=("y", "x"), + attrs={"area": area_def} ) - w = MITIFFWriter(filename='dummy.tif', base_dir=self.base_dir) + w = MITIFFWriter(filename="dummy.tif", base_dir=self.base_dir) proj4_string = w._add_proj4_string(ds1, ds1) - self.assertEqual(proj4_string, check['proj4']) + assert proj4_string == check["proj4"] def test_save_dataset_palette(self): """Test writer operation as palette.""" @@ -918,27 +918,27 @@ def test_save_dataset_palette(self): 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0] color_map = (0, 1, 2, 3, 4, 5) - pal_desc = ['test', 'test2'] + pal_desc = ["test", "test2"] unit = "Test" dataset = self._get_test_one_dataset() - palette = {'palette': True, - 'palette_color_map': color_map, - 'palette_description': pal_desc, - 'palette_unit': unit, - 'palette_channel_name': dataset.attrs['name']} + palette = {"palette": True, + "palette_color_map": color_map, + "palette_description": pal_desc, + "palette_unit": unit, + "palette_channel_name": dataset.attrs["name"]} w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset, **palette) - filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], - dataset.attrs['start_time']) + filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], + dataset.attrs["start_time"]) pillow_tif = Image.open(os.path.join(self.base_dir, filename)) # Need to check PHOTOMETRIC is 3, ie palette - self.assertEqual(pillow_tif.tag_v2.get(262), 3) + assert pillow_tif.tag_v2.get(262) == 3 # Check the colormap of the palette image palette = pillow_tif.palette colormap = list((palette.getdata())[1]) - self.assertEqual(colormap, exp_c) + assert colormap == exp_c imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) found_color_info = False unit_name_found = False @@ -958,14 +958,14 @@ def test_save_dataset_palette(self): unit_name = key unit_name_found = True found_color_info = False - elif 'COLOR INFO:' in key: + elif "COLOR INFO:" in key: found_color_info = True # Check the name of the palette description - self.assertEqual(name_length, 2) + assert name_length == 2 # Check the name and unit name of the palette - self.assertEqual(unit_name, ' Test') + assert unit_name == " Test" # Check the palette description of the palette - self.assertEqual(names, [' test', ' test2']) + assert names == [" test", " test2"] self._read_back_mitiff_and_check(os.path.join(self.base_dir, filename), expected) def test_simple_write_two_bands(self): @@ -982,12 +982,12 @@ def test_get_test_dataset_three_bands_prereq(self): dataset = self._get_test_dataset_three_bands_prereq() w = MITIFFWriter(base_dir=self.base_dir) w.save_dataset(dataset) - filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], - dataset.attrs['start_time']) + filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], + dataset.attrs["start_time"]) imgdesc = self._imagedescription_from_mitiff(os.path.join(self.base_dir, filename)) for element in imgdesc: - if ' Channels:' in element: - self.assertEqual(element, ' Channels: 3 In this file: 1 2 3') + if " Channels:" in element: + assert element == " Channels: 3 In this file: 1 2 3" def test_save_dataset_with_calibration_error_one_dataset(self): """Test saving if mitiff as dataset with only one channel with invalid calibration.""" @@ -998,9 +998,9 @@ def test_save_dataset_with_calibration_error_one_dataset(self): logger.level = logging.DEBUG dataset = self._get_test_dataset_calibration_one_dataset() - prereqs = [make_dsq(name='4', calibration='not_valid_calibration_name')] - dataset.attrs['prerequisites'] = prereqs - w = MITIFFWriter(filename=dataset.attrs['metadata_requirements']['file_pattern'], base_dir=self.base_dir) + prereqs = [make_dsq(name="4", calibration="not_valid_calibration_name")] + dataset.attrs["prerequisites"] = prereqs + w = MITIFFWriter(filename=dataset.attrs["metadata_requirements"]["file_pattern"], base_dir=self.base_dir) _reverse_offset = 0. _reverse_scale = 1. 
_decimals = 2 @@ -1010,7 +1010,7 @@ def test_save_dataset_with_calibration_error_one_dataset(self): with self.assertLogs(logger) as lc: w._add_calibration_datasets(4, dataset, _reverse_offset, _reverse_scale, _decimals) for _op in lc.output: - self.assertIn("Unknown calib type. Must be Radiance, Reflectance or BT.", _op) + assert "Unknown calib type. Must be Radiance, Reflectance or BT." in _op finally: logger.removeHandler(stream_handler) @@ -1024,22 +1024,21 @@ def test_save_dataset_with_missing_palette(self): logger.setLevel(logging.DEBUG) dataset = self._get_test_one_dataset() - pal_desc = ['test', 'test2'] + pal_desc = ["test", "test2"] unit = "Test" - palette = {'palette': True, - 'palette_description': pal_desc, - 'palette_unit': unit, - 'palette_channel_name': dataset.attrs['name']} + palette = {"palette": True, + "palette_description": pal_desc, + "palette_unit": unit, + "palette_channel_name": dataset.attrs["name"]} w = MITIFFWriter(base_dir=self.base_dir) tiffinfo = {} - tiffinfo[270] = "Just dummy image desc".encode('utf-8') - filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs['name'], - dataset.attrs['start_time']) + tiffinfo[270] = "Just dummy image desc".encode("utf-8") + filename = "{:s}_{:%Y%m%d_%H%M%S}.mitiff".format(dataset.attrs["name"], + dataset.attrs["start_time"]) try: with self.assertLogs(logger, logging.ERROR) as lc: w._save_as_palette(dataset.compute(), os.path.join(self.base_dir, filename), tiffinfo, **palette) for _op in lc.output: - self.assertIn(("In a mitiff palette image a color map must be provided: " - "palette_color_map is missing."), _op) + assert "In a mitiff palette image a color map must be provided: palette_color_map is missing." in _op finally: logger.removeHandler(stream_handler) diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py index ac75b68cbf..a9c60bdf90 100644 --- a/satpy/tests/writer_tests/test_ninjogeotiff.py +++ b/satpy/tests/writer_tests/test_ninjogeotiff.py @@ -463,8 +463,8 @@ def ntg_latlon(test_image_latlon): SatelliteNameID=654321) -@pytest.fixture -def patch_datetime_now(monkeypatch): +@pytest.fixture() +def _patch_datetime_now(monkeypatch): """Get a fake datetime.datetime.now().""" # Source: https://stackoverflow.com/a/20503374/974555, CC-BY-SA 4.0 @@ -477,7 +477,7 @@ def now(cls, tz=datetime.timezone.utc): return datetime.datetime(2033, 5, 18, 3, 33, 20, tzinfo=tz) - monkeypatch.setattr(datetime, 'datetime', mydatetime) + monkeypatch.setattr(datetime, "datetime", mydatetime) def test_write_and_read_file(test_image_small_mid_atlantic_L, tmp_path): @@ -741,11 +741,13 @@ def test_calc_single_tag_by_name(ntg1, ntg2, ntg3): assert ntg2.get_tag("DataType") == "GORN" assert ntg3.get_tag("DataType") == "PPRN" assert ntg1.get_tag("DataSource") == "dowsing rod" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unknown tag: invalid"): ntg1.get_tag("invalid") - with pytest.raises(ValueError): + with pytest.raises(ValueError, + match="Optional tag OriginalHeader must be supplied by user if user wants to request the value," + " but wasn't."): ntg1.get_tag("OriginalHeader") - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Tag Gradient is added later by the GeoTIFF writer."): ntg1.get_tag("Gradient") @@ -773,11 +775,12 @@ def test_get_color_depth(ntg1, ntg2, ntg3, ntg_weird, ntg_rgba, ntg_cmyk): assert ntg3.get_color_depth() == 8 # mode P assert ntg_weird.get_color_depth() == 16 # mode LA assert ntg_rgba.get_color_depth() == 32 # 
mode RGBA - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unsupported image mode: CMYK"): ntg_cmyk.get_color_depth() -def test_get_creation_date_id(ntg1, ntg2, ntg3, patch_datetime_now): +@pytest.mark.usefixtures("_patch_datetime_now") +def test_get_creation_date_id(ntg1, ntg2, ntg3): """Test getting the creation date ID. This is the time at which the file was created. @@ -887,7 +890,7 @@ def test_get_projection(ntg1, ntg2, ntg3, ntg_weird, ntg_rgba, ntg_cmyk, assert ntg_cmyk.get_projection() == "SPOL" assert ntg_rgba.get_projection() == "MERC" assert ntg_latlon.get_projection() == "PLAT" - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Unknown mapping from area .*"): ntg_weird.get_projection() @@ -898,7 +901,7 @@ def test_get_ref_lat_1(ntg1, ntg2, ntg3, ntg_weird, ntg_latlon): np.testing.assert_allclose(rl1, 0.0) np.testing.assert_allclose(ntg2.get_ref_lat_1(), 2.5) np.testing.assert_allclose(ntg3.get_ref_lat_1(), 75) - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="Could not find reference latitude for area test-area-north-stereo"): ntg_weird.get_ref_lat_1() with pytest.raises(AttributeError): ntg_latlon.get_ref_lat_1() @@ -945,7 +948,7 @@ def test_get_ymax(ntg1, ntg2, ntg3): def test_create_unknown_tags(test_image_small_arctic_P): """Test that unknown tags raise ValueError.""" from satpy.writers.ninjogeotiff import NinJoTagGenerator - with pytest.raises(ValueError): + with pytest.raises(ValueError, match="The following tags were not recognised: Locatie"): NinJoTagGenerator( test_image_small_arctic_P, 42, diff --git a/satpy/tests/writer_tests/test_ninjotiff.py b/satpy/tests/writer_tests/test_ninjotiff.py index 58f991e73d..ea6cf07f95 100644 --- a/satpy/tests/writer_tests/test_ninjotiff.py +++ b/satpy/tests/writer_tests/test_ninjotiff.py @@ -43,66 +43,65 @@ def get_scaling_from_history(self): pyninjotiff_mock.ninjotiff = mock.Mock() -@mock.patch.dict(sys.modules, {'pyninjotiff': pyninjotiff_mock, 'pyninjotiff.ninjotiff': pyninjotiff_mock.ninjotiff}) +@mock.patch.dict(sys.modules, {"pyninjotiff": pyninjotiff_mock, "pyninjotiff.ninjotiff": pyninjotiff_mock.ninjotiff}) class TestNinjoTIFFWriter(unittest.TestCase): """The ninjo tiff writer tests.""" - @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) + @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_init(self): """Test the init.""" from satpy.writers.ninjotiff import NinjoTIFFWriter - ninjo_tags = {40000: 'NINJO'} + ninjo_tags = {40000: "NINJO"} ntw = NinjoTIFFWriter(tags=ninjo_tags) - self.assertDictEqual(ntw.tags, ninjo_tags) + assert ntw.tags == ninjo_tags - @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_dataset') - @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) + @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_dataset") + @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_dataset(self, iwsd): """Test saving a dataset.""" from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() - dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'}) - with mock.patch('satpy.writers.ninjotiff.convert_units') as uconv: - ntw.save_dataset(dataset, physic_unit='CELSIUS') - uconv.assert_called_once_with(dataset, 'K', 'CELSIUS') - self.assertEqual(iwsd.call_count, 1) - - @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_dataset') - @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) + dataset = xr.DataArray([1, 2, 3], attrs={"units": 
"K"}) + with mock.patch("satpy.writers.ninjotiff.convert_units") as uconv: + ntw.save_dataset(dataset, physic_unit="CELSIUS") + uconv.assert_called_once_with(dataset, "K", "CELSIUS") + assert iwsd.call_count == 1 + + @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_dataset") + @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_dataset_skip_unit_conversion(self, iwsd): """Test saving a dataset without unit conversion.""" from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() - dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'}) - with mock.patch('satpy.writers.ninjotiff.convert_units') as uconv: - ntw.save_dataset(dataset, physic_unit='CELSIUS', + dataset = xr.DataArray([1, 2, 3], attrs={"units": "K"}) + with mock.patch("satpy.writers.ninjotiff.convert_units") as uconv: + ntw.save_dataset(dataset, physic_unit="CELSIUS", convert_temperature_units=False) uconv.assert_not_called() - self.assertEqual(iwsd.call_count, 1) + assert iwsd.call_count == 1 - @mock.patch('satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset') - @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_image') - @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) + @mock.patch("satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset") + @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_image") + @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_image(self, iwsi, save_dataset): """Test saving an image.""" nt = pyninjotiff_mock.ninjotiff nt.reset_mock() from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() - dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'}) - img = FakeImage(dataset, 'L') - ret = ntw.save_image(img, filename='bla.tif', compute=False) + dataset = xr.DataArray([1, 2, 3], attrs={"units": "K"}) + img = FakeImage(dataset, "L") + ret = ntw.save_image(img, filename="bla.tif", compute=False) nt.save.assert_called() - assert nt.save.mock_calls[0][2]['compute'] is False - assert nt.save.mock_calls[0][2]['ch_min_measurement_unit'] < nt.save.mock_calls[0][2]['ch_max_measurement_unit'] + assert nt.save.mock_calls[0][2]["compute"] is False + assert nt.save.mock_calls[0][2]["ch_min_measurement_unit"] < nt.save.mock_calls[0][2]["ch_max_measurement_unit"] assert ret == nt.save.return_value def test_convert_units_self(self): """Test that unit conversion to themselves do nothing.""" + from satpy.tests.utils import make_fake_scene from satpy.writers.ninjotiff import convert_units - from ..utils import make_fake_scene - # ensure that converting from % to itself does not change the data sc = make_fake_scene( {"VIS006": np.arange(25, dtype="f4").reshape(5, 5)}, @@ -115,9 +114,8 @@ def test_convert_units_self(self): def test_convert_units_temp(self): """Test that temperature unit conversions works as expected.""" # test converting between °C and K + from satpy.tests.utils import make_fake_scene from satpy.writers.ninjotiff import convert_units - - from ..utils import make_fake_scene sc = make_fake_scene( {"IR108": np.arange(25, dtype="f4").reshape(5, 5)}, common_attrs={"units": "K"}) @@ -134,9 +132,8 @@ def test_convert_units_temp(self): def test_convert_units_other(self): """Test that other unit conversions are not implemented.""" # test arbitrary different conversion + from satpy.tests.utils import make_fake_scene from satpy.writers.ninjotiff import convert_units - - from ..utils import make_fake_scene sc = make_fake_scene( {"rain_rate": np.arange(25, dtype="f8").reshape(5, 5)}, 
common_attrs={"units": "millimeter/hour"}) @@ -145,9 +142,9 @@ def test_convert_units_other(self): with pytest.raises(NotImplementedError): convert_units(ds_in, "millimeter/hour", "m/s") - @mock.patch('satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset') - @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_image') - @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff) + @mock.patch("satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset") + @mock.patch("satpy.writers.ninjotiff.ImageWriter.save_image") + @mock.patch("satpy.writers.ninjotiff.nt", pyninjotiff_mock.ninjotiff) def test_P_image_is_uint8(self, iwsi, save_dataset): """Test that a P-mode image is converted to uint8s.""" nt = pyninjotiff_mock.ninjotiff @@ -155,6 +152,6 @@ def test_P_image_is_uint8(self, iwsi, save_dataset): from satpy.writers.ninjotiff import NinjoTIFFWriter ntw = NinjoTIFFWriter() dataset = xr.DataArray([1, 2, 3]).astype(int) - img = FakeImage(dataset, 'P') - ntw.save_image(img, filename='bla.tif', compute=False) + img = FakeImage(dataset, "P") + ntw.save_image(img, filename="bla.tif", compute=False) assert nt.save.mock_calls[0][1][0].data.dtype == np.uint8 diff --git a/satpy/tests/writer_tests/test_simple_image.py b/satpy/tests/writer_tests/test_simple_image.py index 5ebf0dfb47..01d89a22ad 100644 --- a/satpy/tests/writer_tests/test_simple_image.py +++ b/satpy/tests/writer_tests/test_simple_image.py @@ -44,9 +44,9 @@ def _get_test_datasets(): import xarray as xr ds1 = xr.DataArray( da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow()} + dims=("y", "x"), + attrs={"name": "test", + "start_time": datetime.utcnow()} ) return [ds1] @@ -72,6 +72,6 @@ def test_simple_delayed_write(self): w = PillowWriter(base_dir=self.base_dir) res = w.save_datasets(datasets, compute=False) for r__ in res: - self.assertIsInstance(r__, Delayed) + assert isinstance(r__, Delayed) r__.compute() compute_writer_results(res) diff --git a/satpy/tests/writer_tests/test_utils.py b/satpy/tests/writer_tests/test_utils.py index 10a199d8b0..e71e3a7e1e 100644 --- a/satpy/tests/writer_tests/test_utils.py +++ b/satpy/tests/writer_tests/test_utils.py @@ -27,9 +27,9 @@ class WriterUtilsTest(unittest.TestCase): def test_flatten_dict(self): """Test dictionary flattening.""" - d = {'a': 1, 'b': {'c': 1, 'd': {'e': 1, 'f': {'g': [1, 2]}}}} - expected = {'a': 1, - 'b_c': 1, - 'b_d_e': 1, - 'b_d_f_g': [1, 2]} - self.assertDictEqual(wutils.flatten_dict(d), expected) + d = {"a": 1, "b": {"c": 1, "d": {"e": 1, "f": {"g": [1, 2]}}}} + expected = {"a": 1, + "b_c": 1, + "b_d_e": 1, + "b_d_f_g": [1, 2]} + assert wutils.flatten_dict(d) == expected diff --git a/satpy/utils.py b/satpy/utils.py index 67150fed9d..f9ea05ca79 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -132,12 +132,12 @@ def logging_on(level=logging.WARNING): console = logging.StreamHandler() console.setFormatter(logging.Formatter("[%(levelname)s: %(asctime)s :" " %(name)s] %(message)s", - '%Y-%m-%d %H:%M:%S')) + "%Y-%m-%d %H:%M:%S")) console.setLevel(level) - logging.getLogger('').addHandler(console) + logging.getLogger("").addHandler(console) _is_logging_on = True - log = logging.getLogger('') + log = logging.getLogger("") log.setLevel(level) for h in log.handlers: h.setLevel(level) @@ -145,13 +145,13 @@ def logging_on(level=logging.WARNING): def logging_off(): """Turn logging off.""" - logging.getLogger('').handlers = [logging.NullHandler()] + logging.getLogger("").handlers = [logging.NullHandler()] def get_logger(name): 
"""Return logger with null handler added if needed.""" - if not hasattr(logging.Logger, 'trace'): - logging.addLevelName(TRACE_LEVEL, 'TRACE') + if not hasattr(logging.Logger, "trace"): + logging.addLevelName(TRACE_LEVEL, "TRACE") def trace(self, message, *args, **kwargs): if self.isEnabledFor(TRACE_LEVEL): @@ -167,7 +167,7 @@ def trace(self, message, *args, **kwargs): def in_ipynb(): """Check if we are in a jupyter notebook.""" try: - return 'ZMQ' in get_ipython().__class__.__name__ + return "ZMQ" in get_ipython().__class__.__name__ except NameError: return False @@ -245,20 +245,20 @@ def proj_units_to_meters(proj_str): proj_parts = proj_str.split() new_parts = [] for itm in proj_parts: - key, val = itm.split('=') - key = key.strip('+') - if key in ['a', 'b', 'h']: + key, val = itm.split("=") + key = key.strip("+") + if key in ["a", "b", "h"]: val = float(val) if val < 6e6: val *= 1000. - val = '%.3f' % val + val = "%.3f" % val - if key == 'units' and val == 'km': + if key == "units" and val == "km": continue - new_parts.append('+%s=%s' % (key, val)) + new_parts.append("+%s=%s" % (key, val)) - return ' '.join(new_parts) + return " ".join(new_parts) def _get_sunz_corr_li_and_shibata(cos_zen): @@ -373,9 +373,9 @@ def _get_sat_altitude(data_arr, key_prefixes): try: alt = _get_first_available_item(orb_params, alt_keys) except KeyError: - alt = orb_params['projection_altitude'] + alt = orb_params["projection_altitude"] warnings.warn( - 'Actual satellite altitude not available, using projection altitude instead.', + "Actual satellite altitude not available, using projection altitude instead.", stacklevel=3 ) return alt @@ -389,10 +389,10 @@ def _get_sat_lonlat(data_arr, key_prefixes): lon = _get_first_available_item(orb_params, lon_keys) lat = _get_first_available_item(orb_params, lat_keys) except KeyError: - lon = orb_params['projection_longitude'] - lat = orb_params['projection_latitude'] + lon = orb_params["projection_longitude"] + lat = orb_params["projection_latitude"] warnings.warn( - 'Actual satellite lon/lat not available, using projection center instead.', + "Actual satellite lon/lat not available, using projection center instead.", stacklevel=3 ) return lon, lat @@ -456,21 +456,21 @@ def _check_yaml_configs(configs, key): diagnostic = {} for i in configs: for fname in i: - msg = 'ok' + msg = "ok" res = None - with open(fname, 'r', encoding='utf-8') as stream: + with open(fname, "r", encoding="utf-8") as stream: try: res = yaml.load(stream, Loader=UnsafeLoader) except yaml.YAMLError as err: stream.seek(0) res = yaml.load(stream, Loader=BaseLoader) - if err.context == 'while constructing a Python object': + if err.context == "while constructing a Python object": msg = err.problem else: - msg = 'error' + msg = "error" finally: try: - diagnostic[res[key]['name']] = msg + diagnostic[res[key]["name"]] = msg except (KeyError, TypeError): # this object doesn't have a 'name' pass @@ -483,7 +483,7 @@ def _check_import(module_names): for module_name in module_names: try: __import__(module_name) - res = 'ok' + res = "ok" except ImportError as err: res = str(err) diagnostics[module_name] = res @@ -505,24 +505,24 @@ def check_satpy(readers=None, writers=None, extras=None): from satpy.readers import configs_for_reader from satpy.writers import configs_for_writer - print('Readers') - print('=======') - for reader, res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), 'reader').items()): - print(reader + ': ', res) - print() - - print('Writers') - print('=======') - for writer, res in 
sorted(_check_yaml_configs(configs_for_writer(writer=writers), 'writer').items()): - print(writer + ': ', res) - print() - - print('Extras') - print('======') - module_names = extras if extras is not None else ('cartopy', 'geoviews') + print("Readers") # noqa: T201 + print("=======") # noqa: T201 + for reader, res in sorted(_check_yaml_configs(configs_for_reader(reader=readers), "reader").items()): + print(reader + ": ", res) # noqa: T201 + print() # noqa: T201 + + print("Writers") # noqa: T201 + print("=======") # noqa: T201 + for writer, res in sorted(_check_yaml_configs(configs_for_writer(writer=writers), "writer").items()): + print(writer + ": ", res) # noqa: T201 + print() # noqa: T201 + + print("Extras") # noqa: T201 + print("======") # noqa: T201 + module_names = extras if extras is not None else ("cartopy", "geoviews") for module_name, res in sorted(_check_import(module_names).items()): - print(module_name + ': ', res) - print() + print(module_name + ": ", res) # noqa: T201 + print() # noqa: T201 def unify_chunks(*data_arrays: xr.DataArray) -> tuple[xr.DataArray, ...]: @@ -621,7 +621,7 @@ def get_legacy_chunk_size(): def _get_pytroll_chunk_size(): try: - chunk_size = int(os.environ['PYTROLL_CHUNK_SIZE']) + chunk_size = int(os.environ["PYTROLL_CHUNK_SIZE"]) warnings.warn( "The PYTROLL_CHUNK_SIZE environment variable is pending deprecation. " "You can use the dask config setting `array.chunk-size` (or the DASK_ARRAY__CHUNK_SIZE environment" @@ -747,7 +747,7 @@ def _sort_files_to_local_remote_and_fsfiles(filenames): fs_files.append(f) elif isinstance(f, pathlib.Path): local_files.append(f) - elif urlparse(f).scheme in ('', 'file') or "\\" in f: + elif urlparse(f).scheme in ("", "file") or "\\" in f: local_files.append(f) else: remote_files.append(f) @@ -788,7 +788,7 @@ def _get_storage_dictionary_options(reader_kwargs): # set base storage options if there are any storage_opt_dict[reader_name] = shared_storage_options.copy() if isinstance(rkwargs, dict) and "storage_options" in rkwargs: - storage_opt_dict.setdefault(reader_name, {}).update(rkwargs.pop('storage_options')) + storage_opt_dict.setdefault(reader_name, {}).update(rkwargs.pop("storage_options")) return storage_opt_dict diff --git a/satpy/writers/__init__.py b/satpy/writers/__init__.py index 0af433f28d..dcf482188d 100644 --- a/satpy/writers/__init__.py +++ b/satpy/writers/__init__.py @@ -44,18 +44,18 @@ def read_writer_config(config_files, loader=UnsafeLoader): """Read the writer `config_files` and return the info extracted.""" conf = {} - LOG.debug('Reading %s', str(config_files)) + LOG.debug("Reading %s", str(config_files)) for config_file in config_files: with open(config_file) as fd: conf.update(yaml.load(fd.read(), Loader=loader)) try: - writer_info = conf['writer'] + writer_info = conf["writer"] except KeyError: raise KeyError( "Malformed config file {}: missing writer 'writer'".format( config_files)) - writer_info['config_files'] = config_files + writer_info["config_files"] = config_files return writer_info @@ -63,7 +63,7 @@ def load_writer_configs(writer_configs, **writer_kwargs): """Load the writer from the provided `writer_configs`.""" try: writer_info = read_writer_config(writer_configs) - writer_class = writer_info['writer'] + writer_class = writer_info["writer"] except (ValueError, KeyError, yaml.YAMLError): raise ValueError("Invalid writer configs: " "'{}'".format(writer_configs)) @@ -78,11 +78,11 @@ def load_writer(writer, **writer_kwargs): config_fn = writer + ".yaml" if "." 
not in writer else writer config_files = config_search_paths(os.path.join("writers", config_fn)) writer_kwargs.setdefault("config_files", config_files) - if not writer_kwargs['config_files']: + if not writer_kwargs["config_files"]: raise ValueError("Unknown writer '{}'".format(writer)) try: - return load_writer_configs(writer_kwargs['config_files'], + return load_writer_configs(writer_kwargs["config_files"], **writer_kwargs) except ValueError: raise ValueError("Writer '{}' does not exist or could not be " @@ -102,15 +102,15 @@ def configs_for_writer(writer=None): if not isinstance(writer, (list, tuple)): writer = [writer] # given a config filename or writer name - config_files = [w if w.endswith('.yaml') else w + '.yaml' for w in writer] + config_files = [w if w.endswith(".yaml") else w + ".yaml" for w in writer] else: - paths = get_entry_points_config_dirs('satpy.writers') - writer_configs = glob_config(os.path.join('writers', '*.yaml'), search_dirs=paths) + paths = get_entry_points_config_dirs("satpy.writers") + writer_configs = glob_config(os.path.join("writers", "*.yaml"), search_dirs=paths) config_files = set(writer_configs) for config_file in config_files: config_basename = os.path.basename(config_file) - paths = get_entry_points_config_dirs('satpy.writers') + paths = get_entry_points_config_dirs("satpy.writers") writer_configs = config_search_paths( os.path.join("writers", config_basename), search_dirs=paths, @@ -143,7 +143,7 @@ def available_writers(as_dict=False): LOG.warning("Could not import writer config from: %s", writer_configs) LOG.debug("Error loading YAML", exc_info=True) continue - writers.append(writer_info if as_dict else writer_info['name']) + writers.append(writer_info if as_dict else writer_info["name"]) return writers @@ -231,11 +231,11 @@ def add_overlay(orig_img, area, coast_dir, color=None, width=None, resolution=No DeprecationWarning, stacklevel=2 ) - if hasattr(orig_img, 'convert'): + if hasattr(orig_img, "convert"): # image must be in RGB space to work with pycoast/pydecorate - res_mode = ('RGBA' if orig_img.final_mode(fill_value).endswith('A') else 'RGB') + res_mode = ("RGBA" if orig_img.final_mode(fill_value).endswith("A") else "RGB") orig_img = orig_img.convert(res_mode) - elif not orig_img.mode.startswith('RGB'): + elif not orig_img.mode.startswith("RGB"): raise RuntimeError("'trollimage' 1.6+ required to support adding " "overlays/decorations to non-RGB data.") @@ -244,7 +244,7 @@ def add_overlay(orig_img, area, coast_dir, color=None, width=None, resolution=No cw_ = ContourWriterAGG(coast_dir) new_image = orig_img.apply_pil(_burn_overlay, res_mode, - None, {'fill_value': fill_value}, + None, {"fill_value": fill_value}, (area, cw_, overlays), None) return new_image @@ -253,25 +253,25 @@ def _create_overlays_dict(color, width, grid, level_coast, level_borders): """Fill in the overlays dict.""" overlays = dict() # fill with sensible defaults - general_params = {'outline': color or (0, 0, 0), - 'width': width or 0.5} + general_params = {"outline": color or (0, 0, 0), + "width": width or 0.5} for key, val in general_params.items(): if val is not None: - overlays.setdefault('coasts', {}).setdefault(key, val) - overlays.setdefault('borders', {}).setdefault(key, val) + overlays.setdefault("coasts", {}).setdefault(key, val) + overlays.setdefault("borders", {}).setdefault(key, val) if level_coast is None: level_coast = 1 - overlays.setdefault('coasts', {}).setdefault('level', level_coast) + overlays.setdefault("coasts", {}).setdefault("level", level_coast) if 
level_borders is None: level_borders = 1 - overlays.setdefault('borders', {}).setdefault('level', level_borders) + overlays.setdefault("borders", {}).setdefault("level", level_borders) if grid is not None: - if 'major_lonlat' in grid and grid['major_lonlat']: - major_lonlat = grid.pop('major_lonlat') - minor_lonlat = grid.pop('minor_lonlat', major_lonlat) - grid.update({'Dlonlat': major_lonlat, 'dlonlat': minor_lonlat}) + if "major_lonlat" in grid and grid["major_lonlat"]: + major_lonlat = grid.pop("major_lonlat") + minor_lonlat = grid.pop("minor_lonlat", major_lonlat) + grid.update({"Dlonlat": major_lonlat, "dlonlat": minor_lonlat}) for key, val in grid.items(): - overlays.setdefault('grid', {}).setdefault(key, val) + overlays.setdefault("grid", {}).setdefault(key, val) return overlays @@ -288,10 +288,10 @@ def add_text(orig, dc, img, text): arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE) - new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'], - coords={'y': orig.data.coords['y'], - 'x': orig.data.coords['x'], - 'bands': list(img.mode)}, + new_data = xr.DataArray(arr, dims=["y", "x", "bands"], + coords={"y": orig.data.coords["y"], + "x": orig.data.coords["x"], + "bands": list(img.mode)}, attrs=orig.data.attrs) return XRImage(new_data) @@ -309,10 +309,10 @@ def add_logo(orig, dc, img, logo): arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE) - new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'], - coords={'y': orig.data.coords['y'], - 'x': orig.data.coords['x'], - 'bands': list(img.mode)}, + new_data = xr.DataArray(arr, dims=["y", "x", "bands"], + coords={"y": orig.data.coords["y"], + "x": orig.data.coords["x"], + "bands": list(img.mode)}, attrs=orig.data.attrs) return XRImage(new_data) @@ -330,10 +330,10 @@ def add_scale(orig, dc, img, scale): arr = da.from_array(np.array(img) / 255.0, chunks=CHUNK_SIZE) - new_data = xr.DataArray(arr, dims=['y', 'x', 'bands'], - coords={'y': orig.data.coords['y'], - 'x': orig.data.coords['x'], - 'bands': list(img.mode)}, + new_data = xr.DataArray(arr, dims=["y", "x", "bands"], + coords={"y": orig.data.coords["y"], + "x": orig.data.coords["x"], + "bands": list(img.mode)}, attrs=orig.data.attrs) return XRImage(new_data) @@ -373,10 +373,10 @@ def add_decorate(orig, fill_value=None, **decorate): # Need to create this here to possible keep the alignment # when adding text and/or logo with pydecorate - if hasattr(orig, 'convert'): + if hasattr(orig, "convert"): # image must be in RGB space to work with pycoast/pydecorate - orig = orig.convert('RGBA' if orig.mode.endswith('A') else 'RGB') - elif not orig.mode.startswith('RGB'): + orig = orig.convert("RGBA" if orig.mode.endswith("A") else "RGB") + elif not orig.mode.startswith("RGB"): raise RuntimeError("'trollimage' 1.6+ required to support adding " "overlays/decorations to non-RGB data.") img_orig = orig.pil_image(fill_value=fill_value) @@ -386,14 +386,14 @@ def add_decorate(orig, fill_value=None, **decorate): # decorate need to be a list to maintain the alignment # as ordered in the list img = orig - if 'decorate' in decorate: - for dec in decorate['decorate']: - if 'logo' in dec: - img = add_logo(img, dc, img_orig, logo=dec['logo']) - elif 'text' in dec: - img = add_text(img, dc, img_orig, text=dec['text']) - elif 'scale' in dec: - img = add_scale(img, dc, img_orig, scale=dec['scale']) + if "decorate" in decorate: + for dec in decorate["decorate"]: + if "logo" in dec: + img = add_logo(img, dc, img_orig, logo=dec["logo"]) + elif "text" in dec: + img = add_text(img, dc, img_orig, 
text=dec["text"]) + elif "scale" in dec: + img = add_scale(img, dc, img_orig, scale=dec["scale"]) return img @@ -445,7 +445,7 @@ def get_enhanced_image(dataset, enhance=None, overlay=None, decorate=None, enhancer.apply(img, **dataset.attrs) if overlay is not None: - img = add_overlay(img, dataset.attrs['area'], fill_value=fill_value, **overlay) + img = add_overlay(img, dataset.attrs["area"], fill_value=fill_value, **overlay) if decorate is not None: img = add_decorate(img, fill_value=fill_value, **decorate) @@ -595,7 +595,7 @@ def compute_writer_results(results): if targets: for target in targets: - if hasattr(target, 'close'): + if hasattr(target, "close"): target.close() @@ -632,23 +632,23 @@ def __init__(self, name=None, filename=None, base_dir=None, **kwargs): """ # Load the config Plugin.__init__(self, **kwargs) - self.info = self.config.get('writer', {}) + self.info = self.config.get("writer", {}) - if 'file_pattern' in self.info: + if "file_pattern" in self.info: warnings.warn( "Writer YAML config is using 'file_pattern' which " "has been deprecated, use 'filename' instead.", stacklevel=2 ) - self.info['filename'] = self.info.pop('file_pattern') + self.info["filename"] = self.info.pop("file_pattern") - if 'file_pattern' in kwargs: + if "file_pattern" in kwargs: warnings.warn( "'file_pattern' has been deprecated, use 'filename' instead.", DeprecationWarning, stacklevel=2 ) - filename = kwargs.pop('file_pattern') + filename = kwargs.pop("file_pattern") # Use options from the config file if they weren't passed as arguments self.name = self.info.get("name", None) if name is None else name @@ -679,7 +679,7 @@ def separate_init_kwargs(cls, kwargs): # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs = {} kwargs = kwargs.copy() - for kw in ['base_dir', 'filename', 'file_pattern']: + for kw in ["base_dir", "filename", "file_pattern"]: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs @@ -696,8 +696,8 @@ def create_filename_parser(self, base_dir): @staticmethod def _prepare_metadata_for_filename_formatting(attrs): - if isinstance(attrs.get('sensor'), set): - attrs['sensor'] = '-'.join(sorted(attrs['sensor'])) + if isinstance(attrs.get("sensor"), set): + attrs["sensor"] = "-".join(sorted(attrs["sensor"])) def get_filename(self, **kwargs): """Create a filename where output data will be saved. 
@@ -863,7 +863,7 @@ def separate_init_kwargs(cls, kwargs): """Separate the init kwargs.""" # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs, kwargs = super(ImageWriter, cls).separate_init_kwargs(kwargs) - for kw in ['enhancement_config', 'enhance']: + for kw in ["enhancement_config", "enhance"]: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) return init_kwargs, kwargs @@ -1179,7 +1179,7 @@ def __init__(self, enhancement_config_file=None): # it wasn't specified in the config or in the kwargs, we should # provide a default config_fn = os.path.join("enhancements", "generic.yaml") - paths = get_entry_points_config_dirs('satpy.enhancements') + paths = get_entry_points_config_dirs("satpy.enhancements") self.enhancement_config_file = config_search_paths(config_fn, search_dirs=paths) if not self.enhancement_config_file: @@ -1199,7 +1199,7 @@ def get_sensor_enhancement_config(self, sensor): # one single sensor sensor = [sensor] - paths = get_entry_points_config_dirs('satpy.enhancements') + paths = get_entry_points_config_dirs("satpy.enhancements") for sensor_name in sensor: config_fn = os.path.join("enhancements", sensor_name + ".yaml") config_files = config_search_paths(config_fn, search_dirs=paths) @@ -1227,8 +1227,8 @@ def apply(self, img, **info): backup_id = f"" data_id = info.get("_satpy_id", backup_id) LOG.debug(f"Data for {data_id} will be enhanced with options:\n\t{enh_kwargs['operations']}") - for operation in enh_kwargs['operations']: - fun = operation['method'] - args = operation.get('args', []) - kwargs = operation.get('kwargs', {}) + for operation in enh_kwargs["operations"]: + fun = operation["method"] + args = operation.get("args", []) + kwargs = operation.get("kwargs", {}) fun(img, *args, **kwargs) diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py index 4b7f32d1df..5f10418e8a 100644 --- a/satpy/writers/awips_tiled.py +++ b/satpy/writers/awips_tiled.py @@ -233,25 +233,25 @@ from satpy.writers import DecisionTree, Enhancer, Writer, get_enhanced_image LOG = logging.getLogger(__name__) -DEFAULT_OUTPUT_PATTERN = '{source_name}_AII_{platform_name}_{sensor}_' \ - '{name}_{sector_id}_{tile_id}_' \ - '{start_time:%Y%m%d_%H%M}.nc' +DEFAULT_OUTPUT_PATTERN = "{source_name}_AII_{platform_name}_{sensor}_" \ + "{name}_{sector_id}_{tile_id}_" \ + "{start_time:%Y%m%d_%H%M}.nc" UNIT_CONV = { - 'micron': 'microm', - 'mm h-1': 'mm/h', - '1': '*1', - 'none': '*1', - 'percent': '%', - 'Kelvin': 'kelvin', - 'K': 'kelvin', + "micron": "microm", + "mm h-1": "mm/h", + "1": "*1", + "none": "*1", + "percent": "%", + "Kelvin": "kelvin", + "K": "kelvin", } -TileInfo = namedtuple('TileInfo', ['tile_count', 'image_shape', 'tile_shape', - 'tile_row_offset', 'tile_column_offset', 'tile_id', - 'tile_number', - 'x', 'y', 'xy_factors', 'tile_slices', 'data_slices']) -XYFactors = namedtuple('XYFactors', ['mx', 'bx', 'my', 'by']) +TileInfo = namedtuple("TileInfo", ["tile_count", "image_shape", "tile_shape", + "tile_row_offset", "tile_column_offset", "tile_id", + "tile_number", + "x", "y", "xy_factors", "tile_slices", "data_slices"]) +XYFactors = namedtuple("XYFactors", ["mx", "bx", "my", "by"]) def fix_awips_file(fn): @@ -265,9 +265,9 @@ def fix_awips_file(fn): # of NetCDF LOG.info("Modifying output NetCDF file to work with AWIPS") import h5py - h = h5py.File(fn, 'a') - if '_NCProperties' in h.attrs: - del h.attrs['_NCProperties'] + h = h5py.File(fn, "a") + if "_NCProperties" in h.attrs: + del h.attrs["_NCProperties"] h.close() @@ -604,12 +604,12 @@ def 
_generate_tile_info(self): def _get_factor_offset_fill(input_data_arr, vmin, vmax, encoding): - dtype_str = encoding['dtype'] + dtype_str = encoding["dtype"] dtype = np.dtype(getattr(np, dtype_str)) file_bit_depth = dtype.itemsize * 8 - unsigned_in_signed = encoding.get('_Unsigned') == "true" - is_unsigned = dtype.kind == 'u' - bit_depth = input_data_arr.attrs.get('bit_depth', file_bit_depth) + unsigned_in_signed = encoding.get("_Unsigned") == "true" + is_unsigned = dtype.kind == "u" + bit_depth = input_data_arr.attrs.get("bit_depth", file_bit_depth) num_fills = 1 # future: possibly support more than one fill value if bit_depth is None: bit_depth = file_bit_depth @@ -666,7 +666,7 @@ def _add_valid_ranges(data_arrs): # we don't want to effect the original attrs data_arr = data_arr.copy(deep=False) # these are dask arrays, they need to get computed later - data_arr.attrs['valid_range'] = (vmin, vmax) + data_arr.attrs["valid_range"] = (vmin, vmax) yield data_arr @@ -676,7 +676,7 @@ class AWIPSTiledVariableDecisionTree(DecisionTree): def __init__(self, decision_dicts, **kwargs): """Initialize decision tree with specific keys to look for.""" # Fields used to match a product object to it's correct configuration - attrs = kwargs.pop('attrs', + attrs = kwargs.pop("attrs", ["name", "standard_name", "satellite", @@ -693,30 +693,30 @@ class NetCDFTemplate: def __init__(self, template_dict): """Parse template dictionary and prepare for rendering.""" - self.is_single_variable = template_dict.get('single_variable', False) - self.global_attributes = template_dict.get('global_attributes', {}) + self.is_single_variable = template_dict.get("single_variable", False) + self.global_attributes = template_dict.get("global_attributes", {}) default_var_config = { "default": { "encoding": {"dtype": "uint16"}, } } - self.variables = template_dict.get('variables', default_var_config) + self.variables = template_dict.get("variables", default_var_config) default_coord_config = { "default": { "encoding": {"dtype": "uint16"}, } } - self.coordinates = template_dict.get('coordinates', default_coord_config) + self.coordinates = template_dict.get("coordinates", default_coord_config) self._var_tree = AWIPSTiledVariableDecisionTree([self.variables]) self._coord_tree = AWIPSTiledVariableDecisionTree([self.coordinates]) - self._filename_format_str = template_dict.get('filename') + self._filename_format_str = template_dict.get("filename") self._str_formatter = StringFormatter() self._template_dict = template_dict - def get_filename(self, base_dir='', **kwargs): + def get_filename(self, base_dir="", **kwargs): """Generate output NetCDF file from metadata.""" # format the filename if self._filename_format_str is None: @@ -794,7 +794,7 @@ def get_attr_value(self, attr_name, input_metadata, value=None, raw_key=None, ra if func is not None: value = func(input_metadata) if value is None: - LOG.debug('no routine matching %s', meth_name) + LOG.debug("no routine matching %s", meth_name) return value def _render_attrs(self, attr_configs, input_metadata, prefix="_"): @@ -814,28 +814,28 @@ def _render_global_attributes(self, input_metadata): prefix="_global_") def _render_variable_attributes(self, var_config, input_metadata): - attr_configs = var_config['attributes'] + attr_configs = var_config["attributes"] var_attrs = self._render_attrs(attr_configs, input_metadata, prefix="_data_") return var_attrs def _render_coordinate_attributes(self, coord_config, input_metadata): - attr_configs = coord_config['attributes'] + attr_configs = 
coord_config["attributes"] coord_attrs = self._render_attrs(attr_configs, input_metadata, prefix="_coord_") return coord_attrs def _render_variable_encoding(self, var_config, input_data_arr): new_encoding = input_data_arr.encoding.copy() # determine fill value and - if 'encoding' in var_config: - new_encoding.update(var_config['encoding']) + if "encoding" in var_config: + new_encoding.update(var_config["encoding"]) if "dtype" not in new_encoding: - new_encoding['dtype'] = 'int16' - new_encoding['_Unsigned'] = 'true' + new_encoding["dtype"] = "int16" + new_encoding["_Unsigned"] = "true" return new_encoding def _render_variable(self, data_arr): var_config = self._var_tree.find_match(**data_arr.attrs) - new_var_name = var_config.get('var_name', data_arr.attrs['name']) + new_var_name = var_config.get("var_name", data_arr.attrs["name"]) new_data_arr = data_arr.copy() # remove coords which may cause issues later on new_data_arr = new_data_arr.reset_coords(drop=True) @@ -848,8 +848,8 @@ def _render_variable(self, data_arr): def _get_matchable_coordinate_metadata(self, coord_name, coord_attrs): match_kwargs = {} - if 'name' not in coord_attrs: - match_kwargs['name'] = coord_name + if "name" not in coord_attrs: + match_kwargs["name"] = coord_name match_kwargs.update(coord_attrs) return match_kwargs @@ -897,29 +897,29 @@ def __init__(self, template_dict, swap_end_time=False): def _swap_attributes_end_time(self, template_dict): """Swap every use of 'start_time' to use 'end_time' instead.""" - variable_attributes = [var_section['attributes'] for var_section in template_dict.get('variables', {}).values()] - global_attributes = template_dict.get('global_attributes', {}) + variable_attributes = [var_section["attributes"] for var_section in template_dict.get("variables", {}).values()] + global_attributes = template_dict.get("global_attributes", {}) for attr_section in variable_attributes + [global_attributes]: for attr_name in attr_section: attr_config = attr_section[attr_name] - if '{start_time' in attr_config.get('value', ''): - attr_config['value'] = attr_config['value'].replace('{start_time', '{end_time') - if attr_config.get('raw_key', '') == 'start_time': - attr_config['raw_key'] = 'end_time' + if "{start_time" in attr_config.get("value", ""): + attr_config["value"] = attr_config["value"].replace("{start_time", "{end_time") + if attr_config.get("raw_key", "") == "start_time": + attr_config["raw_key"] = "end_time" def _data_units(self, input_metadata): - units = input_metadata.get('units', '1') + units = input_metadata.get("units", "1") # we *know* AWIPS can't handle some units return UNIT_CONV.get(units, units) def _global_start_date_time(self, input_metadata): - start_time = input_metadata['start_time'] + start_time = input_metadata["start_time"] if self._swap_end_time: - start_time = input_metadata['end_time'] + start_time = input_metadata["end_time"] return start_time.strftime("%Y-%m-%dT%H:%M:%S") def _global_awips_id(self, input_metadata): - return "AWIPS_" + input_metadata['name'] + return "AWIPS_" + input_metadata["name"] def _global_physical_element(self, input_metadata): var_config = self._var_tree.find_match(**input_metadata) @@ -930,11 +930,11 @@ def _global_physical_element(self, input_metadata): def _global_production_location(self, input_metadata): """Get default global production_location attribute.""" del input_metadata - org = os.environ.get('ORGANIZATION', None) + org = os.environ.get("ORGANIZATION", None) if org is not None: prod_location = org else: - LOG.warning('environment 
ORGANIZATION not set for .production_location attribute, using hostname') + LOG.warning("environment ORGANIZATION not set for .production_location attribute, using hostname") import socket prod_location = socket.gethostname() # FUTURE: something more correct but this will do for now @@ -954,25 +954,25 @@ def _global_production_location(self, input_metadata): @staticmethod def _get_vmin_vmax(var_config, input_data_arr): - if 'valid_range' in var_config: - return var_config['valid_range'] + if "valid_range" in var_config: + return var_config["valid_range"] data_vmin, data_vmax = _get_data_vmin_vmax(input_data_arr) return data_vmin, data_vmax def _render_variable_encoding(self, var_config, input_data_arr): new_encoding = super()._render_variable_encoding(var_config, input_data_arr) vmin, vmax = self._get_vmin_vmax(var_config, input_data_arr) - has_flag_meanings = 'flag_meanings' in input_data_arr.attrs + has_flag_meanings = "flag_meanings" in input_data_arr.attrs is_int = np.issubdtype(input_data_arr.dtype, np.integer) is_cat = has_flag_meanings or is_int - has_sf = new_encoding.get('scale_factor') is not None + has_sf = new_encoding.get("scale_factor") is not None if not has_sf and is_cat: # AWIPS doesn't like Identity conversion so we can't have # a factor of 1 and an offset of 0 # new_encoding['scale_factor'] = None # new_encoding['add_offset'] = None - if '_FillValue' in input_data_arr.attrs: - new_encoding['_FillValue'] = input_data_arr.attrs['_FillValue'] + if "_FillValue" in input_data_arr.attrs: + new_encoding["_FillValue"] = input_data_arr.attrs["_FillValue"] elif not has_sf and vmin is not None and vmax is not None: # calculate scale_factor and add_offset sf, ao, fill = _get_factor_offset_fill( @@ -980,57 +980,57 @@ def _render_variable_encoding(self, var_config, input_data_arr): ) # NOTE: These could be dask arrays that will be computed later # when we go to write the files. 
- new_encoding['scale_factor'] = sf - new_encoding['add_offset'] = ao - new_encoding['_FillValue'] = fill - new_encoding['coordinates'] = ' '.join([ele for ele in input_data_arr.dims]) + new_encoding["scale_factor"] = sf + new_encoding["add_offset"] = ao + new_encoding["_FillValue"] = fill + new_encoding["coordinates"] = " ".join([ele for ele in input_data_arr.dims]) return new_encoding def _get_projection_attrs(self, area_def): """Assign projection attributes per CF standard.""" proj_attrs = area_def.crs.to_cf() proj_encoding = {"dtype": "i4"} - proj_attrs['short_name'] = area_def.area_id - gmap_name = proj_attrs['grid_mapping_name'] + proj_attrs["short_name"] = area_def.area_id + gmap_name = proj_attrs["grid_mapping_name"] preferred_names = { - 'geostationary': 'fixedgrid_projection', - 'lambert_conformal_conic': 'lambert_projection', - 'polar_stereographic': 'polar_projection', - 'mercator': 'mercator_projection', + "geostationary": "fixedgrid_projection", + "lambert_conformal_conic": "lambert_projection", + "polar_stereographic": "polar_projection", + "mercator": "mercator_projection", } if gmap_name not in preferred_names: LOG.warning("Data is in projection %s which may not be supported " "by AWIPS", gmap_name) - area_id_as_var_name = area_def.area_id.replace('-', '_').lower() + area_id_as_var_name = area_def.area_id.replace("-", "_").lower() proj_name = preferred_names.get(gmap_name, area_id_as_var_name) return proj_name, proj_attrs, proj_encoding def _set_xy_coords_attrs(self, new_ds, crs): - y_attrs = new_ds.coords['y'].attrs + y_attrs = new_ds.coords["y"].attrs if crs.is_geographic: - self._fill_units_and_standard_name(y_attrs, 'degrees_north', 'latitude') + self._fill_units_and_standard_name(y_attrs, "degrees_north", "latitude") else: - self._fill_units_and_standard_name(y_attrs, 'meters', 'projection_y_coordinate') - y_attrs['axis'] = 'Y' + self._fill_units_and_standard_name(y_attrs, "meters", "projection_y_coordinate") + y_attrs["axis"] = "Y" - x_attrs = new_ds.coords['x'].attrs + x_attrs = new_ds.coords["x"].attrs if crs.is_geographic: - self._fill_units_and_standard_name(x_attrs, 'degrees_east', 'longitude') + self._fill_units_and_standard_name(x_attrs, "degrees_east", "longitude") else: - self._fill_units_and_standard_name(x_attrs, 'meters', 'projection_x_coordinate') - x_attrs['axis'] = 'X' + self._fill_units_and_standard_name(x_attrs, "meters", "projection_x_coordinate") + x_attrs["axis"] = "X" @staticmethod def _fill_units_and_standard_name(attrs, units, standard_name): """Fill in units and standard_name if not set in `attrs`.""" - if attrs.get('units') is None: - attrs['units'] = units - if attrs['units'] in ('meter', 'metre'): + if attrs.get("units") is None: + attrs["units"] = units + if attrs["units"] in ("meter", "metre"): # AWIPS doesn't like 'meter' - attrs['units'] = 'meters' - if attrs.get('standard_name') is None: - attrs['standard_name'] = standard_name + attrs["units"] = "meters" + if attrs.get("standard_name") is None: + attrs["standard_name"] = standard_name def apply_area_def(self, new_ds, area_def): """Apply information we can gather from the AreaDefinition.""" @@ -1040,25 +1040,25 @@ def apply_area_def(self, new_ds, area_def): new_ds[gmap_name] = gmap_data_arr self._set_xy_coords_attrs(new_ds, area_def.crs) for data_arr in new_ds.data_vars.values(): - if 'y' in data_arr.dims and 'x' in data_arr.dims: - data_arr.attrs['grid_mapping'] = gmap_name + if "y" in data_arr.dims and "x" in data_arr.dims: + data_arr.attrs["grid_mapping"] = gmap_name - 
new_ds.attrs['pixel_x_size'] = area_def.pixel_size_x / 1000.0 - new_ds.attrs['pixel_y_size'] = area_def.pixel_size_y / 1000.0 + new_ds.attrs["pixel_x_size"] = area_def.pixel_size_x / 1000.0 + new_ds.attrs["pixel_y_size"] = area_def.pixel_size_y / 1000.0 return new_ds def apply_tile_coord_encoding(self, new_ds, xy_factors): """Add encoding information specific to the coordinate variables.""" - if 'x' in new_ds.coords: - new_ds.coords['x'].encoding['dtype'] = 'int16' - new_ds.coords['x'].encoding['scale_factor'] = np.float64(xy_factors.mx) - new_ds.coords['x'].encoding['add_offset'] = np.float64(xy_factors.bx) - new_ds.coords['x'].encoding['_FillValue'] = -1 - if 'y' in new_ds.coords: - new_ds.coords['y'].encoding['dtype'] = 'int16' - new_ds.coords['y'].encoding['scale_factor'] = np.float64(xy_factors.my) - new_ds.coords['y'].encoding['add_offset'] = np.float64(xy_factors.by) - new_ds.coords['y'].encoding['_FillValue'] = -1 + if "x" in new_ds.coords: + new_ds.coords["x"].encoding["dtype"] = "int16" + new_ds.coords["x"].encoding["scale_factor"] = np.float64(xy_factors.mx) + new_ds.coords["x"].encoding["add_offset"] = np.float64(xy_factors.bx) + new_ds.coords["x"].encoding["_FillValue"] = -1 + if "y" in new_ds.coords: + new_ds.coords["y"].encoding["dtype"] = "int16" + new_ds.coords["y"].encoding["scale_factor"] = np.float64(xy_factors.my) + new_ds.coords["y"].encoding["add_offset"] = np.float64(xy_factors.by) + new_ds.coords["y"].encoding["_FillValue"] = -1 return new_ds def apply_tile_info(self, new_ds, tile_info): @@ -1067,25 +1067,25 @@ def apply_tile_info(self, new_ds, tile_info): total_pixels = tile_info.image_shape tile_row = tile_info.tile_row_offset tile_column = tile_info.tile_column_offset - tile_height = new_ds.sizes['y'] - tile_width = new_ds.sizes['x'] - new_ds.attrs['tile_row_offset'] = tile_row - new_ds.attrs['tile_column_offset'] = tile_column - new_ds.attrs['product_tile_height'] = tile_height - new_ds.attrs['product_tile_width'] = tile_width - new_ds.attrs['number_product_tiles'] = total_tiles[0] * total_tiles[1] - new_ds.attrs['product_rows'] = total_pixels[0] - new_ds.attrs['product_columns'] = total_pixels[1] + tile_height = new_ds.sizes["y"] + tile_width = new_ds.sizes["x"] + new_ds.attrs["tile_row_offset"] = tile_row + new_ds.attrs["tile_column_offset"] = tile_column + new_ds.attrs["product_tile_height"] = tile_height + new_ds.attrs["product_tile_width"] = tile_width + new_ds.attrs["number_product_tiles"] = total_tiles[0] * total_tiles[1] + new_ds.attrs["product_rows"] = total_pixels[0] + new_ds.attrs["product_columns"] = total_pixels[1] return new_ds def _add_sector_id_global(self, new_ds, sector_id): - if not self._template_dict.get('add_sector_id_global'): + if not self._template_dict.get("add_sector_id_global"): return if sector_id is None: raise ValueError("Keyword 'sector_id' is required for this " "template.") - new_ds.attrs['sector_id'] = sector_id + new_ds.attrs["sector_id"] = sector_id def apply_misc_metadata(self, new_ds, sector_id=None, creator=None, creation_time=None): """Add attributes that don't fit into any other category.""" @@ -1095,9 +1095,9 @@ def apply_misc_metadata(self, new_ds, sector_id=None, creator=None, creation_tim creation_time = datetime.utcnow() self._add_sector_id_global(new_ds, sector_id) - new_ds.attrs['Conventions'] = "CF-1.7" - new_ds.attrs['creator'] = creator - new_ds.attrs['creation_time'] = creation_time.strftime('%Y-%m-%dT%H:%M:%S') + new_ds.attrs["Conventions"] = "CF-1.7" + new_ds.attrs["creator"] = creator + 
new_ds.attrs["creation_time"] = creation_time.strftime("%Y-%m-%dT%H:%M:%S") return new_ds def _render_variable_attributes(self, var_config, input_metadata): @@ -1128,7 +1128,7 @@ def render(self, dataset_or_data_arrays, area_def, def _notnull(data_arr, check_categories=True): is_int = np.issubdtype(data_arr.dtype, np.integer) - fill_value = data_arr.encoding.get('_FillValue', data_arr.attrs.get('_FillValue')) + fill_value = data_arr.encoding.get("_FillValue", data_arr.attrs.get("_FillValue")) if is_int and fill_value is not None: # some DQF datasets are always valid if check_categories: @@ -1178,7 +1178,7 @@ def _copy_to_existing(dataset_to_save, output_filename): new_data[valid_current] = var_data_arr.data[valid_current] var_data_arr.data[:] = new_data var_data_arr.encoding.update(existing_data_arr.encoding) - var_data_arr.encoding.pop('source', None) + var_data_arr.encoding.pop("source", None) return dataset_to_save @@ -1187,10 +1187,10 @@ def _extract_factors(dataset_to_save): factors = {} for data_var in dataset_to_save.data_vars.values(): enc = data_var.encoding - data_var.attrs.pop('valid_range', None) - factor_set = (enc.pop('scale_factor', None), - enc.pop('add_offset', None), - enc.pop('_FillValue', None)) + data_var.attrs.pop("valid_range", None) + factor_set = (enc.pop("scale_factor", None), + enc.pop("add_offset", None), + enc.pop("_FillValue", None)) factors[data_var.name] = factor_set return factors @@ -1199,11 +1199,11 @@ def _reapply_factors(dataset_to_save, factors): for var_name, factor_set in factors.items(): data_arr = dataset_to_save[var_name] if factor_set[0] is not None: - data_arr.encoding['scale_factor'] = factor_set[0] + data_arr.encoding["scale_factor"] = factor_set[0] if factor_set[1] is not None: - data_arr.encoding['add_offset'] = factor_set[1] + data_arr.encoding["add_offset"] = factor_set[1] if factor_set[2] is not None: - data_arr.encoding['_FillValue'] = factor_set[2] + data_arr.encoding["_FillValue"] = factor_set[2] return dataset_to_save @@ -1228,9 +1228,9 @@ def to_nonempty_netcdf(dataset_to_save: xr.Dataset, # TODO: Allow for new variables to be created if update_existing and os.path.isfile(output_filename): dataset_to_save = _copy_to_existing(dataset_to_save, output_filename) - mode = 'a' + mode = "a" else: - mode = 'w' + mode = "w" return dataset_to_save, output_filename, mode # return dataset_to_save.to_netcdf(output_filename, mode=mode) # if fix_awips: @@ -1258,9 +1258,9 @@ class AWIPSTiledWriter(Writer): def __init__(self, compress=False, fix_awips=False, **kwargs): """Initialize writer and decision trees.""" super(AWIPSTiledWriter, self).__init__(default_config_filename="writers/awips_tiled.yaml", **kwargs) - self.base_dir = kwargs.get('base_dir', '') - self.awips_sectors = self.config['sectors'] - self.templates = self.config['templates'] + self.base_dir = kwargs.get("base_dir", "") + self.awips_sectors = self.config["sectors"] + self.templates = self.config["templates"] self.compress = compress self.fix_awips = fix_awips self._fill_sector_info() @@ -1289,7 +1289,7 @@ def separate_init_kwargs(cls, kwargs): # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs, kwargs = super(AWIPSTiledWriter, cls).separate_init_kwargs( kwargs) - for kw in ['compress', 'fix_awips']: + for kw in ["compress", "fix_awips"]: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) @@ -1298,16 +1298,16 @@ def separate_init_kwargs(cls, kwargs): def _fill_sector_info(self): """Convert sector extents if needed.""" for sector_info in 
self.awips_sectors.values(): - sector_info['projection'] = CRS.from_user_input(sector_info['projection']) - p = Proj(sector_info['projection']) - if 'lower_left_xy' in sector_info: - sector_info['lower_left_lonlat'] = p(*sector_info['lower_left_xy'], inverse=True) + sector_info["projection"] = CRS.from_user_input(sector_info["projection"]) + p = Proj(sector_info["projection"]) + if "lower_left_xy" in sector_info: + sector_info["lower_left_lonlat"] = p(*sector_info["lower_left_xy"], inverse=True) else: - sector_info['lower_left_xy'] = p(*sector_info['lower_left_lonlat']) - if 'upper_right_xy' in sector_info: - sector_info['upper_right_lonlat'] = p(*sector_info['upper_right_xy'], inverse=True) + sector_info["lower_left_xy"] = p(*sector_info["lower_left_lonlat"]) + if "upper_right_xy" in sector_info: + sector_info["upper_right_lonlat"] = p(*sector_info["upper_right_xy"], inverse=True) else: - sector_info['upper_right_xy'] = p(*sector_info['upper_right_lonlat']) + sector_info["upper_right_xy"] = p(*sector_info["upper_right_lonlat"]) def _get_lettered_sector_info(self, sector_id): """Get metadata for the current sector if configured. @@ -1334,9 +1334,9 @@ def _get_tile_generator(self, area_def, lettered_grid, sector_id, sector_info = self._get_lettered_sector_info(sector_id) tile_gen = LetteredTileGenerator( area_def, - sector_info['lower_left_xy'] + sector_info['upper_right_xy'], - sector_crs=sector_info['projection'], - cell_size=sector_info['resolution'], + sector_info["lower_left_xy"] + sector_info["upper_right_xy"], + sector_crs=sector_info["projection"], + cell_size=sector_info["resolution"], num_subtiles=num_subtiles, use_sector_reference=use_sector_reference, ) @@ -1356,18 +1356,18 @@ def _area_id(area_def): # get all of the datasets stored by area area_datasets = {} for x in datasets: - area_id = _area_id(x.attrs['area']) - area, ds_list = area_datasets.setdefault(area_id, (x.attrs['area'], [])) + area_id = _area_id(x.attrs["area"]) + area, ds_list = area_datasets.setdefault(area_id, (x.attrs["area"], [])) ds_list.append(x) return area_datasets def _split_rgbs(self, ds): """Split a single RGB dataset in to multiple.""" - for component in 'RGB': + for component in "RGB": band_data = ds.sel(bands=component) - band_data.attrs['name'] += '_{}'.format(component) - band_data.attrs['valid_min'] = 0.0 - band_data.attrs['valid_max'] = 1.0 + band_data.attrs["name"] += "_{}".format(component) + band_data.attrs["valid_min"] = 0.0 + band_data.attrs["valid_max"] = 1.0 yield band_data def _enhance_and_split_rgbs(self, datasets): @@ -1377,7 +1377,7 @@ def _enhance_and_split_rgbs(self, datasets): if ds.ndim == 2: new_datasets.append(ds) continue - elif ds.ndim > 3 or ds.ndim < 1 or (ds.ndim == 3 and 'bands' not in ds.coords): + elif ds.ndim > 3 or ds.ndim < 1 or (ds.ndim == 3 and "bands" not in ds.coords): LOG.error("Can't save datasets with more or less than 2 dimensions " "that aren't RGBs to AWIPS Tiled format: %s", ds.name) else: @@ -1389,31 +1389,31 @@ def _enhance_and_split_rgbs(self, datasets): return new_datasets def _tile_filler(self, tile_info, data_arr): - fill = np.nan if np.issubdtype(data_arr.dtype, np.floating) else data_arr.attrs.get('_FillValue', 0) + fill = np.nan if np.issubdtype(data_arr.dtype, np.floating) else data_arr.attrs.get("_FillValue", 0) data_arr_data = data_arr.data[tile_info.data_slices] data_arr_data = data_arr_data.rechunk(data_arr_data.shape) new_data = da.map_blocks(tile_filler, data_arr_data, tile_info.tile_shape, tile_info.tile_slices, fill, dtype=data_arr.dtype, 
chunks=tile_info.tile_shape) - return xr.DataArray(new_data, dims=('y', 'x'), + return xr.DataArray(new_data, dims=("y", "x"), attrs=data_arr.attrs.copy()) def _slice_and_update_coords(self, tile_info, data_arrays): - new_x = xr.DataArray(tile_info.x, dims=('x',)) - if 'x' in data_arrays[0].coords: - old_x = data_arrays[0].coords['x'] + new_x = xr.DataArray(tile_info.x, dims=("x",)) + if "x" in data_arrays[0].coords: + old_x = data_arrays[0].coords["x"] new_x.attrs.update(old_x.attrs) new_x.encoding = old_x.encoding - new_y = xr.DataArray(tile_info.y, dims=('y',)) - if 'y' in data_arrays[0].coords: - old_y = data_arrays[0].coords['y'] + new_y = xr.DataArray(tile_info.y, dims=("y",)) + if "y" in data_arrays[0].coords: + old_y = data_arrays[0].coords["y"] new_y.attrs.update(old_y.attrs) new_y.encoding = old_y.encoding for data_arr in data_arrays: new_data_arr = self._tile_filler(tile_info, data_arr) - new_data_arr.coords['x'] = new_x - new_data_arr.coords['y'] = new_y + new_data_arr.coords["x"] = new_x + new_data_arr.coords["y"] = new_y yield new_data_arr def _iter_tile_info_and_datasets(self, tile_gen, data_arrays, single_variable=True): @@ -1491,9 +1491,9 @@ def _get_tile_data_info(self, data_arrs, creation_time, source_name): # use the first data array as a "representative" for the group ds_info = data_arrs[0].attrs.copy() # we want to use our own creation_time - ds_info['creation_time'] = creation_time + ds_info["creation_time"] = creation_time if source_name is not None: - ds_info['source_name'] = source_name + ds_info["source_name"] = source_name self._adjust_metadata_times(ds_info) return ds_info @@ -1503,8 +1503,8 @@ def save_datasets(self, datasets, sector_id=None, tile_count=(1, 1), tile_size=None, lettered_grid=False, num_subtiles=None, use_end_time=False, use_sector_reference=False, - template='polar', check_categories=True, - extra_global_attrs=None, environment_prefix='DR', + template="polar", check_categories=True, + extra_global_attrs=None, environment_prefix="DR", compute=True, **kwargs): """Write a series of DataArray objects to multiple NetCDF4 Tile files. 
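For orientation, the keyword arguments in the save_datasets signature above are normally supplied through Scene.save_datasets. The sketch below is only illustrative: the input files, reader, and loaded channel are placeholder assumptions, while the writer keywords mirror the defaults and examples visible in this diff (sector_id example "LCC", source_name "SSEC", template "polar").

    from satpy import Scene

    # Illustrative only: file paths, reader and channel are placeholders.
    scn = Scene(filenames=["/path/to/abi_l1b_file.nc"], reader="abi_l1b")
    scn.load(["C13"])
    scn.save_datasets(
        writer="awips_tiled",   # configured via writers/awips_tiled.yaml
        sector_id="LCC",        # sector/region name used in attributes and filenames
        source_name="SSEC",     # processing source name used in attributes and filenames
        tile_count=(1, 1),      # number of tiles in each dimension (default)
        lettered_grid=False,    # set True to use the static letter-based grid
        template="polar",       # template name from the writer configuration (default)
    )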
@@ -1583,7 +1583,7 @@ def save_datasets(self, datasets, sector_id=None, """ if not isinstance(template, dict): - template = self.config['templates'][template] + template = self.config["templates"][template] template = AWIPSNetCDFTemplate(template, swap_end_time=use_end_time) area_data_arrs = self._group_by_area(datasets) datasets_to_save = [] @@ -1609,9 +1609,9 @@ def save_datasets(self, datasets, sector_id=None, shared_attrs=ds_info, extra_global_attrs=extra_global_attrs) if self.compress: - new_ds.encoding['zlib'] = True + new_ds.encoding["zlib"] = True for var in new_ds.variables.values(): - var.encoding['zlib'] = True + var.encoding["zlib"] = True datasets_to_save.append(new_ds) output_filenames.append(output_filename) @@ -1669,24 +1669,24 @@ def dataset_iter(_delayed_gen): return dataset_iter -def _create_debug_array(sector_info, num_subtiles, font_path='Verdana.ttf'): +def _create_debug_array(sector_info, num_subtiles, font_path="Verdana.ttf"): from PIL import Image, ImageDraw, ImageFont from pkg_resources import resource_filename as get_resource_filename size = (1000, 1000) img = Image.new("L", size, 0) draw = ImageDraw.Draw(img) - if ':' in font_path: + if ":" in font_path: # load from a python package - font_path = get_resource_filename(*font_path.split(':')) + font_path = get_resource_filename(*font_path.split(":")) font = ImageFont.truetype(font_path, 25) - ll_extent = sector_info['lower_left_xy'] - ur_extent = sector_info['upper_right_xy'] + ll_extent = sector_info["lower_left_xy"] + ur_extent = sector_info["upper_right_xy"] total_meters_x = ur_extent[0] - ll_extent[0] total_meters_y = ur_extent[1] - ll_extent[1] - fcs_x = np.ceil(float(sector_info['resolution'][1]) / num_subtiles[1]) - fcs_y = np.ceil(float(sector_info['resolution'][0]) / num_subtiles[0]) + fcs_x = np.ceil(float(sector_info["resolution"][1]) / num_subtiles[1]) + fcs_y = np.ceil(float(sector_info["resolution"][0]) / num_subtiles[0]) total_cells_x = np.ceil(total_meters_x / fcs_x) total_cells_y = np.ceil(total_meters_y / fcs_y) total_cells_x = np.ceil(total_cells_x / num_subtiles[1]) * num_subtiles[1] @@ -1735,10 +1735,10 @@ def _create_debug_array(sector_info, num_subtiles, font_path='Verdana.ttf'): ur_extent[1], ) grid_def = AreaDefinition( - 'debug_grid', - 'debug_grid', - 'debug_grid', - sector_info['projection'], + "debug_grid", + "debug_grid", + "debug_grid", + sector_info["projection"], 1000, 1000, new_extents @@ -1756,26 +1756,26 @@ def draw_rectangle(draw, coordinates, outline=None, fill=None, width=1): def create_debug_lettered_tiles(**writer_kwargs): """Create tile files with tile identifiers "burned" in to the image data for debugging.""" - writer_kwargs['lettered_grid'] = True - writer_kwargs['num_subtiles'] = (2, 2) # default, don't use command line argument + writer_kwargs["lettered_grid"] = True + writer_kwargs["num_subtiles"] = (2, 2) # default, don't use command line argument init_kwargs, save_kwargs = AWIPSTiledWriter.separate_init_kwargs(**writer_kwargs) writer = AWIPSTiledWriter(**init_kwargs) - sector_id = save_kwargs['sector_id'] + sector_id = save_kwargs["sector_id"] sector_info = writer.awips_sectors[sector_id] - area_def, arr = _create_debug_array(sector_info, save_kwargs['num_subtiles']) + area_def, arr = _create_debug_array(sector_info, save_kwargs["num_subtiles"]) now = datetime.utcnow() - product = xr.DataArray(da.from_array(arr, chunks='auto'), attrs=dict( - name='debug_{}'.format(sector_id), - platform_name='DEBUG', - sensor='TILES', + product = xr.DataArray(da.from_array(arr, 
chunks="auto"), attrs=dict( + name="debug_{}".format(sector_id), + platform_name="DEBUG", + sensor="TILES", start_time=now, end_time=now, area=area_def, standard_name="toa_bidirectional_reflectance", - units='1', + units="1", valid_min=0, valid_max=255, )) @@ -1790,12 +1790,12 @@ def main(): """Command line interface mimicing CSPP Polar2Grid.""" import argparse parser = argparse.ArgumentParser(description="Create AWIPS compatible NetCDF tile files") - parser.add_argument("--create-debug", action='store_true', - help='Create debug NetCDF files to show tile locations in AWIPS') - parser.add_argument('-v', '--verbose', dest='verbosity', action="count", default=0, - help='each occurrence increases verbosity 1 level through ' - 'ERROR-WARNING-INFO-DEBUG (default INFO)') - parser.add_argument('-l', '--log', dest="log_fn", default=None, + parser.add_argument("--create-debug", action="store_true", + help="Create debug NetCDF files to show tile locations in AWIPS") + parser.add_argument("-v", "--verbose", dest="verbosity", action="count", default=0, + help="each occurrence increases verbosity 1 level through " + "ERROR-WARNING-INFO-DEBUG (default INFO)") + parser.add_argument("-l", "--log", dest="log_fn", default=None, help="specify the log filename") group_1 = parser.add_argument_group(title="Writer Initialization") @@ -1812,17 +1812,17 @@ def main(): help="Specify how many pixels are in each tile (overrides '--tiles')") # group.add_argument('--tile-offset', nargs=2, default=(0, 0), # help="Start counting tiles from this offset ('row_offset col_offset')") - group_2.add_argument("--letters", dest="lettered_grid", action='store_true', + group_2.add_argument("--letters", dest="lettered_grid", action="store_true", help="Create tiles from a static letter-based grid based on the product projection") group_2.add_argument("--letter-subtiles", nargs=2, type=int, default=(2, 2), help="Specify number of subtiles in each lettered tile: \'row col\'") group_2.add_argument("--output-pattern", default=DEFAULT_OUTPUT_PATTERN, help="output filenaming pattern") - group_2.add_argument("--source-name", default='SSEC', + group_2.add_argument("--source-name", default="SSEC", help="specify processing source name used in attributes and filename (default 'SSEC')") group_2.add_argument("--sector-id", required=True, help="specify name for sector/region used in attributes and filename (example 'LCC')") - group_2.add_argument("--template", default='polar', + group_2.add_argument("--template", default="polar", help="specify the template name to use (default: polar)") args = parser.parse_args() @@ -1838,5 +1838,5 @@ def main(): raise NotImplementedError("Command line interface not implemented yet for AWIPS tiled writer") -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main()) diff --git a/satpy/writers/cf/coords_attrs.py b/satpy/writers/cf/coords_attrs.py index c7e559adc2..6ae80da468 100644 --- a/satpy/writers/cf/coords_attrs.py +++ b/satpy/writers/cf/coords_attrs.py @@ -19,28 +19,28 @@ def add_xy_coords_attrs(dataarray): dataarray = _add_xy_projected_coords_attrs(dataarray) else: dataarray = _add_xy_geographic_coords_attrs(dataarray) - if 'crs' in dataarray.coords: - dataarray = dataarray.drop_vars('crs') + if "crs" in dataarray.coords: + dataarray = dataarray.drop_vars("crs") return dataarray -def _add_xy_projected_coords_attrs(dataarray, x='x', y='y'): +def _add_xy_projected_coords_attrs(dataarray, x="x", y="y"): """Add relevant attributes to x, y coordinates of a projected CRS.""" if x in dataarray.coords: - 
dataarray[x].attrs['standard_name'] = 'projection_x_coordinate' - dataarray[x].attrs['units'] = 'm' + dataarray[x].attrs["standard_name"] = "projection_x_coordinate" + dataarray[x].attrs["units"] = "m" if y in dataarray.coords: - dataarray[y].attrs['standard_name'] = 'projection_y_coordinate' - dataarray[y].attrs['units'] = 'm' + dataarray[y].attrs["standard_name"] = "projection_y_coordinate" + dataarray[y].attrs["units"] = "m" return dataarray -def _add_xy_geographic_coords_attrs(dataarray, x='x', y='y'): +def _add_xy_geographic_coords_attrs(dataarray, x="x", y="y"): """Add relevant attributes to x, y coordinates of a geographic CRS.""" if x in dataarray.coords: - dataarray[x].attrs['standard_name'] = 'longitude' - dataarray[x].attrs['units'] = 'degrees_east' + dataarray[x].attrs["standard_name"] = "longitude" + dataarray[x].attrs["units"] = "degrees_east" if y in dataarray.coords: - dataarray[y].attrs['standard_name'] = 'latitude' - dataarray[y].attrs['units'] = 'degrees_north' + dataarray[y].attrs["standard_name"] = "latitude" + dataarray[y].attrs["units"] = "degrees_north" return dataarray diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 702e25c2fa..506a8bf561 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -190,41 +190,41 @@ # Ensure that either netCDF4 or h5netcdf is available to avoid silent failure if netCDF4 is None and h5netcdf is None: - raise ImportError('Ensure that the netCDF4 or h5netcdf package is installed.') + raise ImportError("Ensure that the netCDF4 or h5netcdf package is installed.") # Numpy datatypes compatible with all netCDF4 backends. ``np.str_`` is # excluded because h5py (and thus h5netcdf) has problems with unicode, see # https://github.com/h5py/h5py/issues/624.""" -NC4_DTYPES = [np.dtype('int8'), np.dtype('uint8'), - np.dtype('int16'), np.dtype('uint16'), - np.dtype('int32'), np.dtype('uint32'), - np.dtype('int64'), np.dtype('uint64'), - np.dtype('float32'), np.dtype('float64'), +NC4_DTYPES = [np.dtype("int8"), np.dtype("uint8"), + np.dtype("int16"), np.dtype("uint16"), + np.dtype("int32"), np.dtype("uint32"), + np.dtype("int64"), np.dtype("uint64"), + np.dtype("float32"), np.dtype("float64"), np.bytes_] # Unsigned and int64 isn't CF 1.7 compatible # Note: Unsigned and int64 are CF 1.9 compatible -CF_DTYPES = [np.dtype('int8'), - np.dtype('int16'), - np.dtype('int32'), - np.dtype('float32'), - np.dtype('float64'), +CF_DTYPES = [np.dtype("int8"), + np.dtype("int16"), + np.dtype("int32"), + np.dtype("float32"), + np.dtype("float64"), np.bytes_] -CF_VERSION = 'CF-1.7' +CF_VERSION = "CF-1.7" def get_extra_ds(dataarray, keys=None): """Get the ancillary_variables DataArrays associated to a dataset.""" ds_collection = {} # Retrieve ancillary variable datarrays - for ancillary_dataarray in dataarray.attrs.get('ancillary_variables', []): + for ancillary_dataarray in dataarray.attrs.get("ancillary_variables", []): ancillary_variable = ancillary_dataarray.name if keys and ancillary_variable not in keys: keys.append(ancillary_variable) ds_collection.update(get_extra_ds(ancillary_dataarray, keys=keys)) # Add input dataarray - ds_collection[dataarray.attrs['name']] = dataarray + ds_collection[dataarray.attrs["name"]] = dataarray return ds_collection @@ -235,20 +235,20 @@ def get_extra_ds(dataarray, keys=None): def add_lonlat_coords(dataarray): """Add 'longitude' and 'latitude' coordinates to DataArray.""" dataarray = dataarray.copy() - area = dataarray.attrs['area'] - ignore_dims = {dim: 0 for dim in dataarray.dims if 
dim not in ['x', 'y']} - chunks = getattr(dataarray.isel(**ignore_dims), 'chunks', None) + area = dataarray.attrs["area"] + ignore_dims = {dim: 0 for dim in dataarray.dims if dim not in ["x", "y"]} + chunks = getattr(dataarray.isel(**ignore_dims), "chunks", None) lons, lats = area.get_lonlats(chunks=chunks) - dataarray['longitude'] = xr.DataArray(lons, dims=['y', 'x'], - attrs={'name': "longitude", - 'standard_name': "longitude", - 'units': 'degrees_east'}, - name='longitude') - dataarray['latitude'] = xr.DataArray(lats, dims=['y', 'x'], - attrs={'name': "latitude", - 'standard_name': "latitude", - 'units': 'degrees_north'}, - name='latitude') + dataarray["longitude"] = xr.DataArray(lons, dims=["y", "x"], + attrs={"name": "longitude", + "standard_name": "longitude", + "units": "degrees_east"}, + name="longitude") + dataarray["latitude"] = xr.DataArray(lats, dims=["y", "x"], + attrs={"name": "latitude", + "standard_name": "latitude", + "units": "degrees_north"}, + name="latitude") return dataarray @@ -256,7 +256,7 @@ def _create_grid_mapping(area): """Create the grid mapping instance for `area`.""" import pyproj - if Version(pyproj.__version__) < Version('2.4.1'): + if Version(pyproj.__version__) < Version("2.4.1"): # technically 2.2, but important bug fixes in 2.4.1 raise ImportError("'cf' writer requires pyproj 2.4.1 or greater") # let pyproj do the heavily lifting (pyproj 2.0+ required) @@ -267,18 +267,18 @@ def _create_grid_mapping(area): def _add_grid_mapping(dataarray): """Convert an area to at CF grid mapping.""" dataarray = dataarray.copy() - area = dataarray.attrs['area'] + area = dataarray.attrs["area"] gmapping_var_name, attrs = _create_grid_mapping(area) - dataarray.attrs['grid_mapping'] = gmapping_var_name + dataarray.attrs["grid_mapping"] = gmapping_var_name return dataarray, xr.DataArray(0, attrs=attrs, name=gmapping_var_name) def area2cf(dataarray, include_lonlats=False, got_lonlats=False): """Convert an area to at CF grid mapping or lon and lats.""" res = [] - if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or include_lonlats): + if not got_lonlats and (isinstance(dataarray.attrs["area"], SwathDefinition) or include_lonlats): dataarray = add_lonlat_coords(dataarray) - if isinstance(dataarray.attrs['area'], AreaDefinition): + if isinstance(dataarray.attrs["area"], AreaDefinition): dataarray, gmapping = _add_grid_mapping(dataarray) res.append(gmapping) res.append(dataarray) @@ -287,7 +287,7 @@ def area2cf(dataarray, include_lonlats=False, got_lonlats=False): def is_lon_or_lat_dataarray(dataarray): """Check if the DataArray represents the latitude or longitude coordinate.""" - if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']: + if "standard_name" in dataarray.attrs and dataarray.attrs["standard_name"] in ["longitude", "latitude"]: return True return False @@ -339,12 +339,12 @@ def make_alt_coords_unique(datas, pretty=False): if pretty: warnings.warn( 'Cannot pretty-format "{}" coordinates because they are ' - 'not identical among the given datasets'.format(coord_name), + "not identical among the given datasets".format(coord_name), stacklevel=2 ) for ds_name, dataset in datas.items(): if coord_name in dataset.coords: - rename = {coord_name: '{}_{}'.format(ds_name, coord_name)} + rename = {coord_name: "{}_{}".format(ds_name, coord_name)} new_datas[ds_name] = new_datas[ds_name].rename(rename) return new_datas @@ -355,15 +355,15 @@ def assert_xy_unique(datas): unique_x = set() unique_y = set() for 
dataset in datas.values(): - if 'y' in dataset.dims: - token_y = tokenize(dataset['y'].data) + if "y" in dataset.dims: + token_y = tokenize(dataset["y"].data) unique_y.add(token_y) - if 'x' in dataset.dims: - token_x = tokenize(dataset['x'].data) + if "x" in dataset.dims: + token_x = tokenize(dataset["x"].data) unique_x.add(token_x) if len(unique_x) > 1 or len(unique_y) > 1: - raise ValueError('Datasets to be saved in one file (or one group) must have identical projection coordinates. ' - 'Please group them by area or save them in separate files.') + raise ValueError("Datasets to be saved in one file (or one group) must have identical projection coordinates. " + "Please group them by area or save them in separate files.") def link_coords(datas): @@ -376,9 +376,9 @@ def link_coords(datas): """ for da_name, data in datas.items(): - declared_coordinates = data.attrs.get('coordinates', []) + declared_coordinates = data.attrs.get("coordinates", []) if isinstance(declared_coordinates, str): - declared_coordinates = declared_coordinates.split(' ') + declared_coordinates = declared_coordinates.split(" ") for coord in declared_coordinates: if coord not in data.coords: try: @@ -387,13 +387,13 @@ def link_coords(datas): except KeyError: warnings.warn( 'Coordinate "{}" referenced by dataarray {} does not ' - 'exist, dropping reference.'.format(coord, da_name), + "exist, dropping reference.".format(coord, da_name), stacklevel=2 ) continue # Drop 'coordinates' attribute in any case to avoid conflicts in xr.Dataset.to_netcdf() - data.attrs.pop('coordinates', None) + data.attrs.pop("coordinates", None) # ###--------------------------------------------------------------------------. @@ -410,11 +410,11 @@ def add_time_bounds_dimension(ds, time="time"): if start_time is not None) end_time = min(end_time for end_time in end_times if end_time is not None) - ds['time_bnds'] = xr.DataArray([[np.datetime64(start_time), + ds["time_bnds"] = xr.DataArray([[np.datetime64(start_time), np.datetime64(end_time)]], - dims=['time', 'bnds_1d']) - ds[time].attrs['bounds'] = "time_bnds" - ds[time].attrs['standard_name'] = "time" + dims=["time", "bnds_1d"]) + ds[time].attrs["bounds"] = "time_bnds" + ds[time].attrs["standard_name"] = "time" return ds @@ -429,13 +429,13 @@ def _process_time_coord(dataarray, epoch): - the time coordinate has size 1 """ - if 'time' in dataarray.coords: - dataarray['time'].encoding['units'] = epoch - dataarray['time'].attrs['standard_name'] = 'time' - dataarray['time'].attrs.pop('bounds', None) + if "time" in dataarray.coords: + dataarray["time"].encoding["units"] = epoch + dataarray["time"].attrs["standard_name"] = "time" + dataarray["time"].attrs.pop("bounds", None) - if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape: - dataarray = dataarray.expand_dims('time') + if "time" not in dataarray.dims and dataarray["time"].size not in dataarray.shape: + dataarray = dataarray.expand_dims("time") return dataarray @@ -503,7 +503,7 @@ def _encode_nc(obj): return [s.lower() for s in obj.astype(str)] return obj.tolist() - raise ValueError('Unable to encode') + raise ValueError("Unable to encode") def encode_nc(obj): @@ -552,10 +552,10 @@ def encode_attrs_nc(attrs): def _add_ancillary_variables_attrs(dataarray): """Replace ancillary_variables DataArray with a list of their name.""" - list_ancillary_variable_names = [da_ancillary.attrs['name'] - for da_ancillary in dataarray.attrs.get('ancillary_variables', [])] + list_ancillary_variable_names = [da_ancillary.attrs["name"] + for 
da_ancillary in dataarray.attrs.get("ancillary_variables", [])] if list_ancillary_variable_names: - dataarray.attrs['ancillary_variables'] = ' '.join(list_ancillary_variable_names) + dataarray.attrs["ancillary_variables"] = " ".join(list_ancillary_variable_names) else: dataarray.attrs.pop("ancillary_variables", None) return dataarray @@ -572,17 +572,17 @@ def _drop_exclude_attrs(dataarray, exclude_attrs): def _remove_satpy_attrs(new_data): """Remove _satpy attribute.""" - satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')] + satpy_attrs = [key for key in new_data.attrs if key.startswith("_satpy")] for satpy_attr in satpy_attrs: new_data.attrs.pop(satpy_attr) - new_data.attrs.pop('_last_resampler', None) + new_data.attrs.pop("_last_resampler", None) return new_data def _format_prerequisites_attrs(dataarray): """Reformat prerequisites attribute value to string.""" - if 'prerequisites' in dataarray.attrs: - dataarray.attrs['prerequisites'] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs['prerequisites']] + if "prerequisites" in dataarray.attrs: + dataarray.attrs["prerequisites"] = [np.bytes_(str(prereq)) for prereq in dataarray.attrs["prerequisites"]] return dataarray @@ -603,8 +603,8 @@ def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs): dataarray = _remove_none_attrs(dataarray) _ = dataarray.attrs.pop("area", None) - if 'long_name' not in dataarray.attrs and 'standard_name' not in dataarray.attrs: - dataarray.attrs['long_name'] = dataarray.name + if "long_name" not in dataarray.attrs and "standard_name" not in dataarray.attrs: + dataarray.attrs["long_name"] = dataarray.name if flatten_attrs: dataarray.attrs = flatten_dict(dataarray.attrs) @@ -642,7 +642,7 @@ def _set_default_chunks(encoding, dataset): variable.shape]).min(axis=0) ) # Chunksize may not exceed shape encoding.setdefault(var_name, {}) - encoding[var_name].setdefault('chunksizes', chunks) + encoding[var_name].setdefault("chunksizes", chunks) return encoding @@ -657,7 +657,7 @@ def _set_default_fill_value(encoding, dataset): coord_vars.extend(set(data_array.dims).intersection(data_array.coords)) for coord_var in coord_vars: encoding.setdefault(coord_var, {}) - encoding[coord_var].update({'_FillValue': None}) + encoding[coord_var].update({"_FillValue": None}) return encoding @@ -668,20 +668,20 @@ def _set_default_time_encoding(encoding, dataset): Default is xarray's CF datetime encoding, which can be overridden by user-defined encoding. 
""" - if 'time' in dataset: + if "time" in dataset: try: - dtnp64 = dataset['time'].data[0] + dtnp64 = dataset["time"].data[0] except IndexError: - dtnp64 = dataset['time'].data + dtnp64 = dataset["time"].data default = CFDatetimeCoder().encode(xr.DataArray(dtnp64)) - time_enc = {'units': default.attrs['units'], 'calendar': default.attrs['calendar']} - time_enc.update(encoding.get('time', {})) - bounds_enc = {'units': time_enc['units'], - 'calendar': time_enc['calendar'], - '_FillValue': None} - encoding['time'] = time_enc - encoding['time_bnds'] = bounds_enc # FUTURE: Not required anymore with xarray-0.14+ + time_enc = {"units": default.attrs["units"], "calendar": default.attrs["calendar"]} + time_enc.update(encoding.get("time", {})) + bounds_enc = {"units": time_enc["units"], + "calendar": time_enc["calendar"], + "_FillValue": None} + encoding["time"] = time_enc + encoding["time_bnds"] = bounds_enc # FUTURE: Not required anymore with xarray-0.14+ return encoding @@ -697,20 +697,20 @@ def _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix): for var_name in list(dataset.variables): if not numeric_name_prefix or not var_name.startswith(numeric_name_prefix): continue - orig_var_name = var_name.replace(numeric_name_prefix, '') + orig_var_name = var_name.replace(numeric_name_prefix, "") if orig_var_name in encoding: encoding[var_name] = encoding.pop(orig_var_name) return encoding -def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'): +def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix="CHANNEL_"): """Update encoding. Preserve dask chunks, avoid fill values in coordinate variables and make sure that time & time bounds have the same units. """ other_to_netcdf_kwargs = to_netcdf_kwargs.copy() - encoding = other_to_netcdf_kwargs.pop('encoding', {}).copy() + encoding = other_to_netcdf_kwargs.pop("encoding", {}).copy() encoding = _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix) encoding = _set_default_chunks(encoding, dataset) encoding = _set_default_fill_value(encoding, dataset) @@ -728,7 +728,7 @@ def _handle_dataarray_name(original_name, numeric_name_prefix): new_name = numeric_name_prefix + original_name else: warnings.warn( - f'Invalid NetCDF dataset name: {original_name} starts with a digit.', + f"Invalid NetCDF dataset name: {original_name} starts with a digit.", stacklevel=5 ) new_name = original_name # occurs when numeric_name_prefix = '', None or False @@ -741,26 +741,26 @@ def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name """Change the DataArray name by prepending numeric_name_prefix if the name is a digit.""" original_name = None dataarray = dataarray.copy() - if 'name' in dataarray.attrs: - original_name = dataarray.attrs.pop('name') + if "name" in dataarray.attrs: + original_name = dataarray.attrs.pop("name") original_name, new_name = _handle_dataarray_name(original_name, numeric_name_prefix) dataarray = dataarray.rename(new_name) if include_orig_name and numeric_name_prefix and original_name and original_name != new_name: - dataarray.attrs['original_name'] = original_name + dataarray.attrs["original_name"] = original_name return dataarray def _add_history(attrs): """Add 'history' attribute to dictionary.""" - _history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow()) - if 'history' in attrs: - if isinstance(attrs['history'], list): - attrs['history'] = ''.join(attrs['history']) - attrs['history'] += '\n' + _history_create + _history_create = "Created by 
pytroll/satpy on {}".format(datetime.utcnow()) + if "history" in attrs: + if isinstance(attrs["history"], list): + attrs["history"] = "".join(attrs["history"]) + attrs["history"] += "\n" + _history_create else: - attrs['history'] = _history_create + attrs["history"] = _history_create return attrs @@ -776,7 +776,7 @@ def _get_groups(groups, list_datarrays): grouped_dataarrays = defaultdict(list) for datarray in list_datarrays: for group_name, group_members in groups.items(): - if datarray.attrs['name'] in group_members: + if datarray.attrs["name"] in group_members: grouped_dataarrays[group_name].append(datarray) break return grouped_dataarrays @@ -787,7 +787,7 @@ def make_cf_dataarray(dataarray, flatten_attrs=False, exclude_attrs=None, include_orig_name=True, - numeric_name_prefix='CHANNEL_'): + numeric_name_prefix="CHANNEL_"): """Make the xr.DataArray CF-compliant. Parameters @@ -809,7 +809,7 @@ def make_cf_dataarray(dataarray, Prepend dataset name with this if starting with a digit. The default is ``"CHANNEL_"``. - Returns + Returns: ------- new_data : xr.DataArray CF-compliant xr.DataArray. @@ -833,7 +833,7 @@ def _collect_cf_dataset(list_dataarrays, include_lonlats=True, pretty=False, include_orig_name=True, - numeric_name_prefix='CHANNEL_'): + numeric_name_prefix="CHANNEL_"): """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Dataset. Parameters @@ -859,7 +859,7 @@ def _collect_cf_dataset(list_dataarrays, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. - Returns + Returns: ------- ds : xr.Dataset A partially CF-compliant xr.Dataset @@ -881,7 +881,7 @@ def _collect_cf_dataset(list_dataarrays, dataarray_type = dataarray.dtype if dataarray_type not in CF_DTYPES: warnings.warn( - f'dtype {dataarray_type} not compatible with {CF_VERSION}.', + f"dtype {dataarray_type} not compatible with {CF_VERSION}.", stacklevel=3 ) # Deep copy the datarray since adding/modifying attributes and coordinates @@ -938,7 +938,7 @@ def collect_cf_datasets(list_dataarrays, include_lonlats=True, epoch=EPOCH, include_orig_name=True, - numeric_name_prefix='CHANNEL_', + numeric_name_prefix="CHANNEL_", groups=None): """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Datasets. @@ -977,7 +977,7 @@ def collect_cf_datasets(list_dataarrays, It is used to create grouped netCDFs using the CF_Writer. If None (the default), no groups will be created. - Returns + Returns: ------- grouped_datasets : dict A dictionary of CF-compliant xr.Dataset: {group_name: xr.Dataset} @@ -1003,7 +1003,7 @@ def collect_cf_datasets(list_dataarrays, # If not grouped, add CF conventions. # - If 'Conventions' key already present, do not overwrite ! 
if "Conventions" not in header_attrs and not is_grouped: - header_attrs['Conventions'] = CF_VERSION + header_attrs["Conventions"] = CF_VERSION # Create dictionary of group xr.Datasets # --> If no groups (groups=None) --> group_name=None @@ -1022,7 +1022,7 @@ def collect_cf_datasets(list_dataarrays, if not is_grouped: ds.attrs = header_attrs - if 'time' in ds: + if "time" in ds: ds = add_time_bounds_dimension(ds, time="time") grouped_datasets[group_name] = ds @@ -1032,7 +1032,7 @@ def collect_cf_datasets(list_dataarrays, def _sanitize_writer_kwargs(writer_kwargs): """Remove satpy-specific kwargs.""" writer_kwargs = copy.deepcopy(writer_kwargs) - satpy_kwargs = ['overlay', 'decorate', 'config_files'] + satpy_kwargs = ["overlay", "decorate", "config_files"] for kwarg in satpy_kwargs: writer_kwargs.pop(kwarg, None) return writer_kwargs @@ -1042,9 +1042,9 @@ def _initialize_root_netcdf(filename, engine, header_attrs, to_netcdf_kwargs): """Initialize root empty netCDF.""" root = xr.Dataset({}, attrs=header_attrs) init_nc_kwargs = to_netcdf_kwargs.copy() - init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point - init_nc_kwargs.pop('unlimited_dims', None) - written = [root.to_netcdf(filename, engine=engine, mode='w', **init_nc_kwargs)] + init_nc_kwargs.pop("encoding", None) # No variables to be encoded at this point + init_nc_kwargs.pop("unlimited_dims", None) + written = [root.to_netcdf(filename, engine=engine, mode="w", **init_nc_kwargs)] return written @@ -1053,7 +1053,7 @@ class CFWriter(Writer): @staticmethod def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, - include_orig_name=True, numeric_name_prefix='CHANNEL_'): + include_orig_name=True, numeric_name_prefix="CHANNEL_"): """Convert the dataarray to something cf-compatible. Args: @@ -1070,8 +1070,8 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, numeric_name_prefix (str): Prepend dataset name with this if starting with a digit """ - warnings.warn('CFWriter.da2cf is deprecated.' - 'Use satpy.writers.cf_writer.make_cf_dataarray instead.', + warnings.warn("CFWriter.da2cf is deprecated." + "Use satpy.writers.cf_writer.make_cf_dataarray instead.", DeprecationWarning, stacklevel=3) return make_cf_dataarray(dataarray=dataarray, epoch=epoch, @@ -1083,8 +1083,8 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, @staticmethod def update_encoding(dataset, to_netcdf_kwargs): """Update encoding info (deprecated).""" - warnings.warn('CFWriter.update_encoding is deprecated. ' - 'Use satpy.writers.cf_writer.update_encoding instead.', + warnings.warn("CFWriter.update_encoding is deprecated. " + "Use satpy.writers.cf_writer.update_encoding instead.", DeprecationWarning, stacklevel=3) return update_encoding(dataset, to_netcdf_kwargs) @@ -1094,7 +1094,7 @@ def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs): def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False, - include_orig_name=True, numeric_name_prefix='CHANNEL_', **to_netcdf_kwargs): + include_orig_name=True, numeric_name_prefix="CHANNEL_", **to_netcdf_kwargs): """Save the given datasets in one netCDF file. Note that all datasets (if grouping: in one group) must have the same projection coordinates. 
@@ -1130,7 +1130,7 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out. """ - logger.info('Saving datasets to NetCDF4/CF.') + logger.info("Saving datasets to NetCDF4/CF.") _check_backend_versions() # Define netCDF filename if not provided diff --git a/satpy/writers/geotiff.py b/satpy/writers/geotiff.py index ba3cad7d6a..1a522ecd68 100644 --- a/satpy/writers/geotiff.py +++ b/satpy/writers/geotiff.py @@ -131,7 +131,7 @@ def separate_init_kwargs(cls, kwargs): # FUTURE: Don't pass Scene.save_datasets kwargs to init and here init_kwargs, kwargs = super(GeoTIFFWriter, cls).separate_init_kwargs( kwargs) - for kw in ['dtype', 'tags']: + for kw in ["dtype", "tags"]: if kw in kwargs: init_kwargs[kw] = kwargs.pop(kw) @@ -246,7 +246,7 @@ def save_image( gdal_options = self._get_gdal_options(kwargs) if fill_value is None: # fall back to fill_value from configuration file - fill_value = self.info.get('fill_value') + fill_value = self.info.get("fill_value") dtype = dtype if dtype is not None else self.dtype if dtype is None and self.enhancer is not False: @@ -268,14 +268,14 @@ def save_image( fill_value = np.nan if keep_palette and cmap is None and img.palette is not None: from satpy.enhancements import create_colormap - cmap = create_colormap({'colors': img.palette}) + cmap = create_colormap({"colors": img.palette}) cmap.set_range(0, len(img.palette) - 1) if tags is None: tags = {} tags.update(self.tags) - return img.save(filename, fformat='tif', driver=driver, + return img.save(filename, fformat="tif", driver=driver, fill_value=fill_value, dtype=dtype, compute=compute, keep_palette=keep_palette, cmap=cmap, diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py index 11f847c114..950fce8b21 100644 --- a/satpy/writers/mitiff.py +++ b/satpy/writers/mitiff.py @@ -35,19 +35,19 @@ def _adjust_kwargs(dataset, kwargs): - if 'platform_name' not in kwargs: - kwargs['platform_name'] = dataset.attrs['platform_name'] - if 'name' not in kwargs: - kwargs['name'] = dataset.attrs['name'] - if 'start_time' not in kwargs: - kwargs['start_time'] = dataset.attrs['start_time'] - if 'sensor' not in kwargs: - kwargs['sensor'] = dataset.attrs['sensor'] + if "platform_name" not in kwargs: + kwargs["platform_name"] = dataset.attrs["platform_name"] + if "name" not in kwargs: + kwargs["name"] = dataset.attrs["name"] + if "start_time" not in kwargs: + kwargs["start_time"] = dataset.attrs["start_time"] + if "sensor" not in kwargs: + kwargs["sensor"] = dataset.attrs["sensor"] # Sensor attrs could be set. MITIFFs needing to handle sensor can only have one sensor # Assume the first value of set as the sensor. 
- if isinstance(kwargs['sensor'], set): - LOG.warning('Sensor is set, will use the first value: %s', kwargs['sensor']) - kwargs['sensor'] = (list(kwargs['sensor']))[0] + if isinstance(kwargs["sensor"], set): + LOG.warning("Sensor is set, will use the first value: %s", kwargs["sensor"]) + kwargs["sensor"] = (list(kwargs["sensor"]))[0] class MITIFFWriter(ImageWriter): @@ -80,22 +80,22 @@ def save_dataset(self, dataset, filename=None, fill_value=None, def _delayed_create(dataset): try: - if 'palette' in kwargs: - self.palette = kwargs['palette'] + if "palette" in kwargs: + self.palette = kwargs["palette"] _adjust_kwargs(dataset, kwargs) try: - self.mitiff_config[kwargs['sensor']] = dataset.attrs['metadata_requirements']['config'] - self.channel_order[kwargs['sensor']] = dataset.attrs['metadata_requirements']['order'] - self.file_pattern = dataset.attrs['metadata_requirements']['file_pattern'] + self.mitiff_config[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["config"] + self.channel_order[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["order"] + self.file_pattern = dataset.attrs["metadata_requirements"]["file_pattern"] except KeyError: # For some mitiff products this info is needed, for others not. # If needed you should know how to fix this pass try: - self.translate_channel_name[kwargs['sensor']] = \ - dataset.attrs['metadata_requirements']['translate'] + self.translate_channel_name[kwargs["sensor"]] = \ + dataset.attrs["metadata_requirements"]["translate"] except KeyError: # For some mitiff products this info is needed, for others not. # If needed you should know how to fix this @@ -127,11 +127,11 @@ def _delayed_create(datasets): _adjust_kwargs(dataset, kwargs) try: - self.mitiff_config[kwargs['sensor']] = dataset.attrs['metadata_requirements']['config'] - translate = dataset.attrs['metadata_requirements']['translate'] - self.translate_channel_name[kwargs['sensor']] = translate - self.channel_order[kwargs['sensor']] = dataset.attrs['metadata_requirements']['order'] - self.file_pattern = dataset.attrs['metadata_requirements']['file_pattern'] + self.mitiff_config[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["config"] + translate = dataset.attrs["metadata_requirements"]["translate"] + self.translate_channel_name[kwargs["sensor"]] = translate + self.channel_order[kwargs["sensor"]] = dataset.attrs["metadata_requirements"]["order"] + self.file_pattern = dataset.attrs["metadata_requirements"]["file_pattern"] except KeyError: # For some mitiff products this info is needed, for others not. 
# If needed you should know how to fix this @@ -140,9 +140,9 @@ def _delayed_create(datasets): image_description = self._make_image_description(datasets, **kwargs) LOG.debug("File pattern %s", self.file_pattern) if isinstance(datasets, list): - kwargs['start_time'] = dataset.attrs['start_time'] + kwargs["start_time"] = dataset.attrs["start_time"] else: - kwargs['start_time'] = datasets.attrs['start_time'] + kwargs["start_time"] = datasets.attrs["start_time"] gen_filename = filename or self.get_filename(**kwargs) LOG.info("Saving mitiff to: %s ...", gen_filename) self._save_datasets_as_mitiff(datasets, image_description, gen_filename, **kwargs) @@ -161,8 +161,8 @@ def _make_channel_list(self, datasets, **kwargs): if self.channel_order: channels = self._reorder_channels(datasets, **kwargs) elif self.palette: - if 'palette_channel_name' in kwargs: - channels.append(kwargs['palette_channel_name'].upper()) + if "palette_channel_name" in kwargs: + channels.append(kwargs["palette_channel_name"].upper()) else: LOG.error("Is palette but can not find palette_channel_name to name the dataset") else: @@ -175,17 +175,17 @@ def _make_channel_list(self, datasets, **kwargs): def _reorder_channels(self, datasets, **kwargs): channels = [] - for cn in self.channel_order[kwargs['sensor']]: + for cn in self.channel_order[kwargs["sensor"]]: for ch, ds in enumerate(datasets): - if isinstance(ds.attrs['prerequisites'][ch], (DataQuery, DataID)): - if ds.attrs['prerequisites'][ch]['name'] == cn: + if isinstance(ds.attrs["prerequisites"][ch], (DataQuery, DataID)): + if ds.attrs["prerequisites"][ch]["name"] == cn: channels.append( - ds.attrs['prerequisites'][ch]['name']) + ds.attrs["prerequisites"][ch]["name"]) break else: - if ds.attrs['prerequisites'][ch] == cn: + if ds.attrs["prerequisites"][ch] == cn: channels.append( - ds.attrs['prerequisites'][ch]) + ds.attrs["prerequisites"][ch]) break return channels @@ -194,29 +194,29 @@ def _channel_names(self, channels, cns, **kwargs): for ch in channels: try: _image_description += str( - self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['alias']) + self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["alias"]) except KeyError: _image_description += str(ch) - _image_description += ' ' + _image_description += " " # Replace last char(space) with \n _image_description = _image_description[:-1] - _image_description += '\n' + _image_description += "\n" return _image_description def _add_sizes(self, datasets, first_dataset): - _image_description = ' Xsize: ' + _image_description = " Xsize: " if isinstance(datasets, list): - _image_description += str(first_dataset.sizes['x']) + '\n' + _image_description += str(first_dataset.sizes["x"]) + "\n" else: - _image_description += str(datasets.sizes['x']) + '\n' + _image_description += str(datasets.sizes["x"]) + "\n" - _image_description += ' Ysize: ' + _image_description += " Ysize: " if isinstance(datasets, list): - _image_description += str(first_dataset.sizes['y']) + '\n' + _image_description += str(first_dataset.sizes["y"]) + "\n" else: - _image_description += str(datasets.sizes['y']) + '\n' + _image_description += str(datasets.sizes["y"]) + "\n" return _image_description @@ -224,12 +224,12 @@ def _add_proj4_string(self, datasets, first_dataset): proj4_string = " Proj string: " if isinstance(datasets, list): - area = first_dataset.attrs['area'] + area = first_dataset.attrs["area"] else: - area = datasets.attrs['area'] + area = datasets.attrs["area"] # Use pyproj's CRS object to get a valid EPSG code if possible # only in newer 
pyresample versions with pyproj 2.0+ installed - if hasattr(area, 'crs') and area.crs.to_epsg() is not None: + if hasattr(area, "crs") and area.crs.to_epsg() is not None: proj4_string += "+init=EPSG:{}".format(area.crs.to_epsg()) else: proj4_string += area.proj_str @@ -239,23 +239,23 @@ def _add_proj4_string(self, datasets, first_dataset): # FUTURE: Use pyproj 2.0+ to convert EPSG to PROJ4 if possible proj4_string, x_0 = self._convert_epsg_to_proj(proj4_string, x_0) - if 'geos' in proj4_string: + if "geos" in proj4_string: proj4_string = proj4_string.replace("+sweep=x ", "") - if '+a=6378137.0 +b=6356752.31414' in proj4_string: + if "+a=6378137.0 +b=6356752.31414" in proj4_string: proj4_string = proj4_string.replace("+a=6378137.0 +b=6356752.31414", "+ellps=WGS84") - if '+units=m' in proj4_string: + if "+units=m" in proj4_string: proj4_string = proj4_string.replace("+units=m", "+units=km") - if not any(datum in proj4_string for datum in ['datum', 'towgs84']): - proj4_string += ' +towgs84=0,0,0' + if not any(datum in proj4_string for datum in ["datum", "towgs84"]): + proj4_string += " +towgs84=0,0,0" - if 'units' not in proj4_string: - proj4_string += ' +units=km' + if "units" not in proj4_string: + proj4_string += " +units=km" proj4_string = self._append_projection_center(proj4_string, datasets, first_dataset, x_0, y_0) LOG.debug("proj4_string: %s", proj4_string) - proj4_string += '\n' + proj4_string += "\n" return proj4_string @@ -264,59 +264,59 @@ def _append_projection_center(self, proj4_string, datasets, first_dataset, x_0, dataset = first_dataset else: dataset = datasets - if 'x_0' not in proj4_string: - proj4_string += ' +x_0=%.6f' % ( - (-dataset.attrs['area'].area_extent[0] + - dataset.attrs['area'].pixel_size_x) + x_0) - proj4_string += ' +y_0=%.6f' % ( - (-dataset.attrs['area'].area_extent[1] + - dataset.attrs['area'].pixel_size_y) + y_0) - elif '+x_0=0' in proj4_string and '+y_0=0' in proj4_string: - proj4_string = proj4_string.replace("+x_0=0", '+x_0=%.6f' % ( - (-dataset.attrs['area'].area_extent[0] + - dataset.attrs['area'].pixel_size_x) + x_0)) - proj4_string = proj4_string.replace("+y_0=0", '+y_0=%.6f' % ( - (-dataset.attrs['area'].area_extent[1] + - dataset.attrs['area'].pixel_size_y) + y_0)) + if "x_0" not in proj4_string: + proj4_string += " +x_0=%.6f" % ( + (-dataset.attrs["area"].area_extent[0] + + dataset.attrs["area"].pixel_size_x) + x_0) + proj4_string += " +y_0=%.6f" % ( + (-dataset.attrs["area"].area_extent[1] + + dataset.attrs["area"].pixel_size_y) + y_0) + elif "+x_0=0" in proj4_string and "+y_0=0" in proj4_string: + proj4_string = proj4_string.replace("+x_0=0", "+x_0=%.6f" % ( + (-dataset.attrs["area"].area_extent[0] + + dataset.attrs["area"].pixel_size_x) + x_0)) + proj4_string = proj4_string.replace("+y_0=0", "+y_0=%.6f" % ( + (-dataset.attrs["area"].area_extent[1] + + dataset.attrs["area"].pixel_size_y) + y_0)) return proj4_string def _convert_epsg_to_proj(self, proj4_string, x_0): - if 'EPSG:32631' in proj4_string: + if "EPSG:32631" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32631", "+proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 - elif 'EPSG:32632' in proj4_string: + elif "EPSG:32632" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32632", "+proj=etmerc +lat_0=0 +lon_0=9 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 - elif 'EPSG:32633' in proj4_string: + elif "EPSG:32633" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32633", "+proj=etmerc +lat_0=0 +lon_0=15 
+k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 - elif 'EPSG:32634' in proj4_string: + elif "EPSG:32634" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32634", "+proj=etmerc +lat_0=0 +lon_0=21 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 - elif 'EPSG:32635' in proj4_string: + elif "EPSG:32635" in proj4_string: proj4_string = proj4_string.replace("+init=EPSG:32635", "+proj=etmerc +lat_0=0 +lon_0=27 +k=0.9996 +ellps=WGS84 +datum=WGS84") x_0 = 500000 - elif 'EPSG' in proj4_string: + elif "EPSG" in proj4_string: LOG.warning("EPSG used in proj string but not converted. Please add this in code") return proj4_string, x_0 def _add_pixel_sizes(self, datasets, first_dataset): _image_description = "" if isinstance(datasets, list): - _image_description += ' Ax: %.6f' % ( - first_dataset.attrs['area'].pixel_size_x / 1000.) - _image_description += ' Ay: %.6f' % ( - first_dataset.attrs['area'].pixel_size_y / 1000.) + _image_description += " Ax: %.6f" % ( + first_dataset.attrs["area"].pixel_size_x / 1000.) + _image_description += " Ay: %.6f" % ( + first_dataset.attrs["area"].pixel_size_y / 1000.) else: - _image_description += ' Ax: %.6f' % ( - datasets.attrs['area'].pixel_size_x / 1000.) - _image_description += ' Ay: %.6f' % ( - datasets.attrs['area'].pixel_size_y / 1000.) + _image_description += " Ax: %.6f" % ( + datasets.attrs["area"].pixel_size_x / 1000.) + _image_description += " Ay: %.6f" % ( + datasets.attrs["area"].pixel_size_y / 1000.) return _image_description @@ -326,21 +326,21 @@ def _add_corners(self, datasets, first_dataset): # Therefor use the center of the upper left pixel. _image_description = "" if isinstance(datasets, list): - _image_description += ' Bx: %.6f' % ( - first_dataset.attrs['area'].area_extent[0] / 1000. + - first_dataset.attrs['area'].pixel_size_x / 1000. / 2.) # LL_x - _image_description += ' By: %.6f' % ( - first_dataset.attrs['area'].area_extent[3] / 1000. - - first_dataset.attrs['area'].pixel_size_y / 1000. / 2.) # UR_y + _image_description += " Bx: %.6f" % ( + first_dataset.attrs["area"].area_extent[0] / 1000. + + first_dataset.attrs["area"].pixel_size_x / 1000. / 2.) # LL_x + _image_description += " By: %.6f" % ( + first_dataset.attrs["area"].area_extent[3] / 1000. - + first_dataset.attrs["area"].pixel_size_y / 1000. / 2.) # UR_y else: - _image_description += ' Bx: %.6f' % ( - datasets.attrs['area'].area_extent[0] / 1000. + - datasets.attrs['area'].pixel_size_x / 1000. / 2.) # LL_x - _image_description += ' By: %.6f' % ( - datasets.attrs['area'].area_extent[3] / 1000. - - datasets.attrs['area'].pixel_size_y / 1000. / 2.) # UR_y - - _image_description += '\n' + _image_description += " Bx: %.6f" % ( + datasets.attrs["area"].area_extent[0] / 1000. + + datasets.attrs["area"].pixel_size_x / 1000. / 2.) # LL_x + _image_description += " By: %.6f" % ( + datasets.attrs["area"].area_extent[3] / 1000. - + datasets.attrs["area"].pixel_size_y / 1000. / 2.) 
# UR_y + + _image_description += "\n" return _image_description def _add_calibration_datasets(self, ch, datasets, reverse_offset, reverse_scale, decimals): @@ -351,34 +351,34 @@ def _add_calibration_datasets(self, ch, datasets, reverse_offset, reverse_scale, found_calibration = False skip_calibration = False ds_list = datasets - if not isinstance(datasets, list) and 'bands' not in datasets.sizes: + if not isinstance(datasets, list) and "bands" not in datasets.sizes: ds_list = [datasets] for i, ds in enumerate(ds_list): - if ('prerequisites' in ds.attrs and - isinstance(ds.attrs['prerequisites'], list) and - len(ds.attrs['prerequisites']) >= i + 1 and - isinstance(ds.attrs['prerequisites'][i], (DataQuery, DataID))): - if ds.attrs['prerequisites'][i].get('name') == str(ch): - if ds.attrs['prerequisites'][i].get('calibration') == 'RADIANCE': + if ("prerequisites" in ds.attrs and + isinstance(ds.attrs["prerequisites"], list) and + len(ds.attrs["prerequisites"]) >= i + 1 and + isinstance(ds.attrs["prerequisites"][i], (DataQuery, DataID))): + if ds.attrs["prerequisites"][i].get("name") == str(ch): + if ds.attrs["prerequisites"][i].get("calibration") == "RADIANCE": raise NotImplementedError( "Mitiff radiance calibration not implemented.") # _table_calibration += ', Radiance, ' # _table_calibration += '[W/m²/µm/sr]' # _decimals = 8 - elif ds.attrs['prerequisites'][i].get('calibration') == 'brightness_temperature': + elif ds.attrs["prerequisites"][i].get("calibration") == "brightness_temperature": found_calibration = True - _table_calibration += ', BT, ' + _table_calibration += ", BT, " _table_calibration += "\N{DEGREE SIGN}" - _table_calibration += u'[C]' + _table_calibration += u"[C]" _reverse_offset = 255. _reverse_scale = -1. _decimals = 2 - elif ds.attrs['prerequisites'][i].get('calibration') == 'reflectance': + elif ds.attrs["prerequisites"][i].get("calibration") == "reflectance": found_calibration = True - _table_calibration += ', Reflectance(Albedo), ' - _table_calibration += '[%]' + _table_calibration += ", Reflectance(Albedo), " + _table_calibration += "[%]" _decimals = 2 else: LOG.warning("Unknown calib type. 
Must be Radiance, Reflectance or BT.") @@ -399,13 +399,13 @@ def _add_calibration_datasets(self, ch, datasets, reverse_offset, reverse_scale, def _add_palette_info(self, datasets, palette_unit, palette_description, **kwargs): # mitiff key word for palette interpretion - _palette = '\n COLOR INFO:\n' + _palette = "\n COLOR INFO:\n" # mitiff info for the unit of the interpretion - _palette += ' {}\n'.format(palette_unit) + _palette += " {}\n".format(palette_unit) # The length of the palette description as needed by mitiff in DIANA - _palette += ' {}\n'.format(len(palette_description)) + _palette += " {}\n".format(len(palette_description)) for desc in palette_description: - _palette += ' {}\n'.format(desc) + _palette += " {}\n".format(desc) return _palette def _add_calibration(self, channels, cns, datasets, **kwargs): @@ -419,10 +419,10 @@ def _add_calibration(self, channels, cns, datasets, **kwargs): if palette: raise NotImplementedError("Mitiff palette saving is not implemented.") else: - _table_calibration += 'Table_calibration: ' + _table_calibration += "Table_calibration: " try: _table_calibration += str( - self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['alias']) + self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["alias"]) except KeyError: _table_calibration += str(ch) @@ -435,18 +435,18 @@ def _add_calibration(self, channels, cns, datasets, **kwargs): _table_calibration += __table_calibration if not skip_calibration: - _table_calibration += ', 8, [ ' + _table_calibration += ", 8, [ " for val in range(0, 256): # Comma separated list of values - _table_calibration += '{0:.{1}f} '.format((float(self.mitiff_config[ - kwargs['sensor']][cns.get(ch, ch)]['min-val']) + + _table_calibration += "{0:.{1}f} ".format((float(self.mitiff_config[ + kwargs["sensor"]][cns.get(ch, ch)]["min-val"]) + ((_reverse_offset + _reverse_scale * val) * - (float(self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['max-val']) - - float(self.mitiff_config[kwargs['sensor']][cns.get(ch, ch)]['min-val']))) / 255.), + (float(self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["max-val"]) - + float(self.mitiff_config[kwargs["sensor"]][cns.get(ch, ch)]["min-val"]))) / 255.), _decimals) # _table_calibration += '0.00000000 ' - _table_calibration += ']\n\n' + _table_calibration += "]\n\n" else: _table_calibration = "" @@ -498,14 +498,14 @@ def _make_image_description(self, datasets, **kwargs): []\n\n """ - translate_platform_name = {'metop01': 'Metop-B', - 'metop02': 'Metop-A', - 'metop03': 'Metop-C', - 'noaa15': 'NOAA-15', - 'noaa16': 'NOAA-16', - 'noaa17': 'NOAA-17', - 'noaa18': 'NOAA-18', - 'noaa19': 'NOAA-19'} + translate_platform_name = {"metop01": "Metop-B", + "metop02": "Metop-A", + "metop03": "Metop-C", + "noaa15": "NOAA-15", + "noaa16": "NOAA-16", + "noaa17": "NOAA-17", + "noaa18": "NOAA-18", + "noaa19": "NOAA-19"} first_dataset = datasets if isinstance(datasets, list): @@ -514,40 +514,40 @@ def _make_image_description(self, datasets, **kwargs): _platform_name = self._get_platform_name(first_dataset, translate_platform_name, kwargs) - _image_description = '' - _image_description.encode('utf-8') + _image_description = "" + _image_description.encode("utf-8") - _image_description += ' Satellite: ' + _image_description += " Satellite: " if _platform_name is not None: _image_description += _platform_name - _image_description += '\n' + _image_description += "\n" - _image_description += ' Date and Time: ' + _image_description += " Date and Time: " # Select earliest start_time first = True earliest = 0 for 
dataset in datasets: if first: - earliest = dataset.attrs['start_time'] + earliest = dataset.attrs["start_time"] else: - if dataset.attrs['start_time'] < earliest: - earliest = dataset.attrs['start_time'] + if dataset.attrs["start_time"] < earliest: + earliest = dataset.attrs["start_time"] first = False LOG.debug("earliest start_time: %s", earliest) _image_description += earliest.strftime("%H:%M %d/%m-%Y\n") - _image_description += ' SatDir: 0\n' + _image_description += " SatDir: 0\n" - _image_description += ' Channels: ' + _image_description += " Channels: " _image_description += self._get_dataset_len(datasets) - _image_description += ' In this file: ' + _image_description += " In this file: " channels = self._make_channel_list(datasets, **kwargs) try: - cns = self.translate_channel_name.get(kwargs['sensor'], {}) + cns = self.translate_channel_name.get(kwargs["sensor"], {}) except KeyError: pass @@ -555,25 +555,25 @@ def _make_image_description(self, datasets, **kwargs): _image_description += self._add_sizes(datasets, first_dataset) - _image_description += ' Map projection: Stereographic\n' + _image_description += " Map projection: Stereographic\n" _image_description += self._add_proj4_string(datasets, first_dataset) - _image_description += ' TrueLat: 60N\n' - _image_description += ' GridRot: 0\n' + _image_description += " TrueLat: 60N\n" + _image_description += " GridRot: 0\n" - _image_description += ' Xunit:1000 m Yunit: 1000 m\n' + _image_description += " Xunit:1000 m Yunit: 1000 m\n" - _image_description += ' NPX: %.6f' % (0) - _image_description += ' NPY: %.6f' % (0) + '\n' + _image_description += " NPX: %.6f" % (0) + _image_description += " NPY: %.6f" % (0) + "\n" _image_description += self._add_pixel_sizes(datasets, first_dataset) _image_description += self._add_corners(datasets, first_dataset) if isinstance(datasets, list): - LOG.debug("Area extent: %s", first_dataset.attrs['area'].area_extent) + LOG.debug("Area extent: %s", first_dataset.attrs["area"].area_extent) else: - LOG.debug("Area extent: %s", datasets.attrs['area'].area_extent) + LOG.debug("Area extent: %s", datasets.attrs["area"].area_extent) if self.palette: LOG.debug("Doing palette image") @@ -587,24 +587,24 @@ def _get_dataset_len(self, datasets): if isinstance(datasets, list): LOG.debug("len datasets: %s", len(datasets)) dataset_len = str(len(datasets)) - elif 'bands' in datasets.sizes: - LOG.debug("len datasets: %s", datasets.sizes['bands']) - dataset_len = str(datasets.sizes['bands']) + elif "bands" in datasets.sizes: + LOG.debug("len datasets: %s", datasets.sizes["bands"]) + dataset_len = str(datasets.sizes["bands"]) elif len(datasets.sizes) == 2: LOG.debug("len datasets: 1") - dataset_len = '1' + dataset_len = "1" else: dataset_len = "" return dataset_len def _get_platform_name(self, first_dataset, translate_platform_name, kwargs): - if 'platform_name' in first_dataset.attrs: + if "platform_name" in first_dataset.attrs: _platform_name = translate_platform_name.get( - first_dataset.attrs['platform_name'], - first_dataset.attrs['platform_name']) - elif 'platform_name' in kwargs: + first_dataset.attrs["platform_name"], + first_dataset.attrs["platform_name"]) + elif "platform_name" in kwargs: _platform_name = translate_platform_name.get( - kwargs['platform_name'], kwargs['platform_name']) + kwargs["platform_name"], kwargs["platform_name"]) else: _platform_name = None return _platform_name @@ -612,7 +612,7 @@ def _get_platform_name(self, first_dataset, translate_platform_name, kwargs): def _calibrate_data(self, 
dataset, calibration, min_val, max_val): reverse_offset = 0. reverse_scale = 1. - if calibration == 'brightness_temperature': + if calibration == "brightness_temperature": # If data is brightness temperature, the data must be inverted. reverse_offset = 255. reverse_scale = -1. @@ -631,44 +631,44 @@ def _save_as_palette(self, datasets, tmp_gen_filename, tiffinfo, **kwargs): # The value of the component is used as an index into the red, green and blue curves # in the ColorMap field to retrieve an RGB triplet that defines the color. When # PhotometricInterpretation=3 is used, ColorMap must be present and SamplesPerPixel must be 1. - tiffinfo[270] = tiffinfo[270].decode('utf-8') + tiffinfo[270] = tiffinfo[270].decode("utf-8") - img = Image.fromarray(datasets.data.astype(np.uint8), mode='P') - if 'palette_color_map' in kwargs: - img.putpalette(ImagePalette.ImagePalette('RGB', kwargs['palette_color_map'])) + img = Image.fromarray(datasets.data.astype(np.uint8), mode="P") + if "palette_color_map" in kwargs: + img.putpalette(ImagePalette.ImagePalette("RGB", kwargs["palette_color_map"])) else: LOG.error("In a mitiff palette image a color map must be provided: palette_color_map is missing.") return - img.save(tmp_gen_filename, compression='raw', compress_level=9, tiffinfo=tiffinfo) + img.save(tmp_gen_filename, compression="raw", compress_level=9, tiffinfo=tiffinfo) def _save_as_enhanced(self, datasets, tmp_gen_filename, **kwargs): """Save datasets as an enhanced RGB image.""" img = get_enhanced_image(datasets.squeeze(), enhance=self.enhancer) tiffinfo = {} - if 'bands' in img.data.sizes and 'bands' not in datasets.sizes: + if "bands" in img.data.sizes and "bands" not in datasets.sizes: LOG.debug("Datasets without 'bands' become image with 'bands' due to enhancement.") LOG.debug("Needs to regenerate mitiff image description") image_description = self._make_image_description(img.data, **kwargs) - tiffinfo[IMAGEDESCRIPTION] = (image_description).encode('utf-8') + tiffinfo[IMAGEDESCRIPTION] = (image_description).encode("utf-8") mitiff_frames = [] - for band in img.data['bands']: + for band in img.data["bands"]: chn = img.data.sel(bands=band) data = chn.values.clip(0, 1) * 254. + 1 data = data.clip(0, 255) - mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode='L')) + mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode="L")) mitiff_frames[0].save(tmp_gen_filename, save_all=True, append_images=mitiff_frames[1:], - compression='raw', compress_level=9, tiffinfo=tiffinfo) + compression="raw", compress_level=9, tiffinfo=tiffinfo) def _generate_intermediate_filename(self, gen_filename): """Replace mitiff ext because pillow doesn't recognise the file type.""" bs, ex = os.path.splitext(gen_filename) tmp_gen_filename = gen_filename - if ex.endswith('mitiff'): + if ex.endswith("mitiff"): bd = os.path.dirname(bs) bn = os.path.basename(bs) - tmp_gen_filename = os.path.join(bd, '.' + bn + '.tif') + tmp_gen_filename = os.path.join(bd, "." 
+ bn + ".tif") return tmp_gen_filename def _save_datasets_as_mitiff(self, datasets, image_description, @@ -680,25 +680,25 @@ def _save_datasets_as_mitiff(self, datasets, image_description, """ tmp_gen_filename = self._generate_intermediate_filename(gen_filename) tiffinfo = {} - tiffinfo[IMAGEDESCRIPTION] = (image_description).encode('latin-1') + tiffinfo[IMAGEDESCRIPTION] = (image_description).encode("latin-1") - cns = self.translate_channel_name.get(kwargs['sensor'], {}) + cns = self.translate_channel_name.get(kwargs["sensor"], {}) if isinstance(datasets, list): LOG.debug("Saving datasets as list") mitiff_frames = [] - for _cn in self.channel_order[kwargs['sensor']]: + for _cn in self.channel_order[kwargs["sensor"]]: for dataset in datasets: - if dataset.attrs['name'] == _cn: + if dataset.attrs["name"] == _cn: # Need to possible translate channels names from satpy to mitiff - cn = cns.get(dataset.attrs['name'], dataset.attrs['name']) - data = self._calibrate_data(dataset, dataset.attrs['calibration'], - self.mitiff_config[kwargs['sensor']][cn]['min-val'], - self.mitiff_config[kwargs['sensor']][cn]['max-val']) - mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode='L')) + cn = cns.get(dataset.attrs["name"], dataset.attrs["name"]) + data = self._calibrate_data(dataset, dataset.attrs["calibration"], + self.mitiff_config[kwargs["sensor"]][cn]["min-val"], + self.mitiff_config[kwargs["sensor"]][cn]["max-val"]) + mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode="L")) break mitiff_frames[0].save(tmp_gen_filename, save_all=True, append_images=mitiff_frames[1:], - compression='raw', compress_level=9, tiffinfo=tiffinfo) - elif 'dataset' in datasets.attrs['name']: + compression="raw", compress_level=9, tiffinfo=tiffinfo) + elif "dataset" in datasets.attrs["name"]: LOG.debug("Saving dataset as single dataset.") self._save_single_dataset(datasets, cns, tmp_gen_filename, tiffinfo, kwargs) elif self.palette: @@ -710,35 +710,35 @@ def _save_datasets_as_mitiff(self, datasets, image_description, os.rename(tmp_gen_filename, gen_filename) def _save_single_dataset(self, datasets, cns, tmp_gen_filename, tiffinfo, kwargs): - LOG.debug("Saving %s as a dataset.", datasets.attrs['name']) - if len(datasets.dims) == 2 and (all('bands' not in i for i in datasets.dims)): + LOG.debug("Saving %s as a dataset.", datasets.attrs["name"]) + if len(datasets.dims) == 2 and (all("bands" not in i for i in datasets.dims)): # Special case with only one channel ie. no bands # Need to possible translate channels names from satpy to mitiff # Note the last index is a tuple index. 
- cn = cns.get(datasets.attrs['prerequisites'][0]['name'], - datasets.attrs['prerequisites'][0]['name']) - data = self._calibrate_data(datasets, datasets.attrs['prerequisites'][0].get('calibration'), - self.mitiff_config[kwargs['sensor']][cn]['min-val'], - self.mitiff_config[kwargs['sensor']][cn]['max-val']) + cn = cns.get(datasets.attrs["prerequisites"][0]["name"], + datasets.attrs["prerequisites"][0]["name"]) + data = self._calibrate_data(datasets, datasets.attrs["prerequisites"][0].get("calibration"), + self.mitiff_config[kwargs["sensor"]][cn]["min-val"], + self.mitiff_config[kwargs["sensor"]][cn]["max-val"]) Image.fromarray(data.astype(np.uint8)).save(tmp_gen_filename, compression="raw", compress_level=9, tiffinfo=tiffinfo) else: mitiff_frames = [] - for _cn_i, _cn in enumerate(self.channel_order[kwargs['sensor']]): - for band in datasets['bands']: + for _cn_i, _cn in enumerate(self.channel_order[kwargs["sensor"]]): + for band in datasets["bands"]: if band == _cn: chn = datasets.sel(bands=band) # Need to possible translate channels names from satpy to mitiff # Note the last index is a tuple index. - cn = cns.get(chn.attrs['prerequisites'][_cn_i]['name'], - chn.attrs['prerequisites'][_cn_i]['name']) - data = self._calibrate_data(chn, chn.attrs['prerequisites'][_cn_i].get('calibration'), - self.mitiff_config[kwargs['sensor']][cn]['min-val'], - self.mitiff_config[kwargs['sensor']][cn]['max-val']) + cn = cns.get(chn.attrs["prerequisites"][_cn_i]["name"], + chn.attrs["prerequisites"][_cn_i]["name"]) + data = self._calibrate_data(chn, chn.attrs["prerequisites"][_cn_i].get("calibration"), + self.mitiff_config[kwargs["sensor"]][cn]["min-val"], + self.mitiff_config[kwargs["sensor"]][cn]["max-val"]) - mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode='L')) + mitiff_frames.append(Image.fromarray(data.astype(np.uint8), mode="L")) break mitiff_frames[0].save(tmp_gen_filename, save_all=True, append_images=mitiff_frames[1:], - compression='raw', compress_level=9, tiffinfo=tiffinfo) + compression="raw", compress_level=9, tiffinfo=tiffinfo) diff --git a/satpy/writers/utils.py b/satpy/writers/utils.py index 3308115ff9..fe9ff00625 100644 --- a/satpy/writers/utils.py +++ b/satpy/writers/utils.py @@ -18,7 +18,7 @@ """Writer utilities.""" -def flatten_dict(d, parent_key='', sep='_'): +def flatten_dict(d, parent_key="", sep="_"): """Flatten a nested dictionary. 
Based on https://stackoverflow.com/a/6027615/5703449 diff --git a/setup.py b/setup.py index 555f299b19..cd1c43422e 100644 --- a/setup.py +++ b/setup.py @@ -22,68 +22,68 @@ from setuptools import find_packages, setup -requires = ['numpy >=1.21', 'pillow', 'pyresample >=1.24.0', 'trollsift', - 'trollimage >=1.20', 'pykdtree', 'pyyaml >=5.1', 'xarray >=0.10.1, !=0.13.0', - 'dask[array] >=0.17.1', 'pyproj>=2.2', 'zarr', 'donfig', 'appdirs', - 'packaging', 'pooch', 'pyorbital'] +requires = ["numpy >=1.21", "pillow", "pyresample >=1.24.0", "trollsift", + "trollimage >=1.20", "pykdtree", "pyyaml >=5.1", "xarray >=0.10.1, !=0.13.0", + "dask[array] >=0.17.1", "pyproj>=2.2", "zarr", "donfig", "appdirs", + "packaging", "pooch", "pyorbital"] -test_requires = ['behave', 'h5py', 'netCDF4', 'pyhdf', 'imageio', - 'rasterio', 'geoviews', 'trollimage', 'fsspec', 'bottleneck', - 'rioxarray', 'pytest', 'pytest-lazy-fixture', 'defusedxml', - 's3fs', 'eccodes', 'h5netcdf', 'xarray-datatree', - 'skyfield', 'ephem', 'pint-xarray', 'astropy', 'dask-image'] +test_requires = ["behave", "h5py", "netCDF4", "pyhdf", "imageio", + "rasterio", "geoviews", "trollimage", "fsspec", "bottleneck", + "rioxarray", "pytest", "pytest-lazy-fixture", "defusedxml", + "s3fs", "eccodes", "h5netcdf", "xarray-datatree", + "skyfield", "ephem", "pint-xarray", "astropy", "dask-image"] extras_require = { # Readers: - 'avhrr_l1b_gaclac': ['pygac >= 1.3.0'], - 'modis_l1b': ['pyhdf', 'python-geotiepoints >= 1.1.7'], - 'geocat': ['pyhdf'], - 'acspo': ['netCDF4 >= 1.1.8'], - 'clavrx': ['netCDF4 >= 1.1.8'], - 'viirs_l1b': ['netCDF4 >= 1.1.8'], - 'viirs_sdr': ['h5py >= 2.7.0'], - 'viirs_compact': ['h5py >= 2.7.0'], - 'omps_edr': ['h5py >= 2.7.0'], - 'amsr2_l1b': ['h5py >= 2.7.0'], - 'hrpt': ['pyorbital >= 1.3.1', 'pygac', 'python-geotiepoints >= 1.1.7'], - 'hrit_msg': ['pytroll-schedule'], - 'msi_safe': ['rioxarray', "bottleneck", "python-geotiepoints"], - 'nc_nwcsaf_msg': ['netCDF4 >= 1.1.8'], - 'sar_c': ['python-geotiepoints >= 1.1.7', 'rasterio', 'rioxarray', 'defusedxml'], - 'abi_l1b': ['h5netcdf'], - 'seviri_l1b_hrit': ['pyorbital >= 1.3.1'], - 'seviri_l1b_native': ['pyorbital >= 1.3.1'], - 'seviri_l1b_nc': ['pyorbital >= 1.3.1', 'netCDF4 >= 1.1.8'], - 'seviri_l2_bufr': ['eccodes'], - 'seviri_l2_grib': ['eccodes'], - 'hsaf_grib': ['pygrib'], - 'remote_reading': ['fsspec'], - 'insat_3d': ['xarray-datatree'], - 'gms5-vissr_l1b': ["numba"], + "avhrr_l1b_gaclac": ["pygac >= 1.3.0"], + "modis_l1b": ["pyhdf", "python-geotiepoints >= 1.1.7"], + "geocat": ["pyhdf"], + "acspo": ["netCDF4 >= 1.1.8"], + "clavrx": ["netCDF4 >= 1.1.8"], + "viirs_l1b": ["netCDF4 >= 1.1.8"], + "viirs_sdr": ["h5py >= 2.7.0"], + "viirs_compact": ["h5py >= 2.7.0"], + "omps_edr": ["h5py >= 2.7.0"], + "amsr2_l1b": ["h5py >= 2.7.0"], + "hrpt": ["pyorbital >= 1.3.1", "pygac", "python-geotiepoints >= 1.1.7"], + "hrit_msg": ["pytroll-schedule"], + "msi_safe": ["rioxarray", "bottleneck", "python-geotiepoints"], + "nc_nwcsaf_msg": ["netCDF4 >= 1.1.8"], + "sar_c": ["python-geotiepoints >= 1.1.7", "rasterio", "rioxarray", "defusedxml"], + "abi_l1b": ["h5netcdf"], + "seviri_l1b_hrit": ["pyorbital >= 1.3.1"], + "seviri_l1b_native": ["pyorbital >= 1.3.1"], + "seviri_l1b_nc": ["pyorbital >= 1.3.1", "netCDF4 >= 1.1.8"], + "seviri_l2_bufr": ["eccodes"], + "seviri_l2_grib": ["eccodes"], + "hsaf_grib": ["pygrib"], + "remote_reading": ["fsspec"], + "insat_3d": ["xarray-datatree"], + "gms5-vissr_l1b": ["numba"], # Writers: - 'cf': ['h5netcdf >= 0.7.3'], - 'awips_tiled': ['netCDF4 >= 1.1.8'], - 
'geotiff': ['rasterio', 'trollimage[geotiff]'], - 'ninjo': ['pyninjotiff', 'pint'], + "cf": ["h5netcdf >= 0.7.3"], + "awips_tiled": ["netCDF4 >= 1.1.8"], + "geotiff": ["rasterio", "trollimage[geotiff]"], + "ninjo": ["pyninjotiff", "pint"], "units": ["pint-xarray"], # Composites/Modifiers: - 'rayleigh': ['pyspectral >= 0.10.1'], - 'angles': ['pyorbital >= 1.3.1'], - 'filters': ['dask-image'], + "rayleigh": ["pyspectral >= 0.10.1"], + "angles": ["pyorbital >= 1.3.1"], + "filters": ["dask-image"], # MultiScene: - 'animations': ['imageio'], + "animations": ["imageio"], # Documentation: - 'doc': ['sphinx', 'sphinx_rtd_theme', 'sphinxcontrib-apidoc'], + "doc": ["sphinx", "sphinx_rtd_theme", "sphinxcontrib-apidoc"], # Other - 'geoviews': ['geoviews'], - 'overlays': ['pycoast', 'pydecorate'], - 'satpos_from_tle': ['skyfield', 'astropy'], - 'tests': test_requires, + "geoviews": ["geoviews"], + "overlays": ["pycoast", "pydecorate"], + "satpos_from_tle": ["skyfield", "astropy"], + "tests": test_requires, } all_extras = [] for extra_deps in extras_require.values(): all_extras.extend(extra_deps) -extras_require['all'] = list(set(all_extras)) +extras_require["all"] = list(set(all_extras)) def _config_data_files(base_dirs, extensions=(".cfg", )): @@ -110,21 +110,21 @@ def _config_data_files(base_dirs, extensions=(".cfg", )): entry_points = { - 'console_scripts': [ - 'satpy_retrieve_all_aux_data=satpy.aux_download:retrieve_all_cmd', + "console_scripts": [ + "satpy_retrieve_all_aux_data=satpy.aux_download:retrieve_all_cmd", ], } -NAME = 'satpy' -with open('README.rst', 'r') as readme: +NAME = "satpy" +with open("README.rst", "r") as readme: README = readme.read() setup(name=NAME, - description='Python package for earth-observing satellite data processing', + description="Python package for earth-observing satellite data processing", long_description=README, - author='The Pytroll Team', - author_email='pytroll@googlegroups.com', + author="The Pytroll Team", + author_email="pytroll@googlegroups.com", classifiers=["Development Status :: 5 - Production/Stable", "Intended Audience :: Science/Research", "License :: OSI Approved :: GNU General Public License v3 " + @@ -147,23 +147,23 @@ def _config_data_files(base_dirs, extensions=(".cfg", )): packages=find_packages(), # Always use forward '/', even on Windows # See https://setuptools.readthedocs.io/en/latest/userguide/datafiles.html#data-files-support - package_data={'satpy': ['etc/geo_image.cfg', - 'etc/areas.yaml', - 'etc/satpy.cfg', - 'etc/himawari-8.cfg', - 'etc/eps_avhrrl1b_6.5.xml', - 'etc/readers/*.yaml', - 'etc/writers/*.yaml', - 'etc/composites/*.yaml', - 'etc/enhancements/*.cfg', - 'etc/enhancements/*.yaml', - 'tests/etc/readers/*.yaml', - 'tests/etc/composites/*.yaml', - 'tests/etc/writers/*.yaml', + package_data={"satpy": ["etc/geo_image.cfg", + "etc/areas.yaml", + "etc/satpy.cfg", + "etc/himawari-8.cfg", + "etc/eps_avhrrl1b_6.5.xml", + "etc/readers/*.yaml", + "etc/writers/*.yaml", + "etc/composites/*.yaml", + "etc/enhancements/*.cfg", + "etc/enhancements/*.yaml", + "tests/etc/readers/*.yaml", + "tests/etc/composites/*.yaml", + "tests/etc/writers/*.yaml", ]}, zip_safe=False, install_requires=requires, - python_requires='>=3.9', + python_requires=">=3.9", extras_require=extras_require, entry_points=entry_points, ) diff --git a/utils/convert_to_ninjotiff.py b/utils/convert_to_ninjotiff.py index e457ee35e3..2189a11dec 100644 --- a/utils/convert_to_ninjotiff.py +++ b/utils/convert_to_ninjotiff.py @@ -38,20 +38,20 @@ debug_on() -parser = 
argparse.ArgumentParser(description='Turn an image into a NinjoTiff.') -parser.add_argument('--cfg', dest='cfg', action="store", +parser = argparse.ArgumentParser(description="Turn an image into a NinjoTiff.") +parser.add_argument("--cfg", dest="cfg", action="store", help="YAML configuration as an alternative to the command line input for NinJo metadata.") -parser.add_argument('--input_dir', dest='input_dir', action="store", +parser.add_argument("--input_dir", dest="input_dir", action="store", help="Directory with input data, that must contain a timestamp in the filename.") -parser.add_argument('--chan_id', dest='chan_id', action="store", help="Channel ID", default="9999") -parser.add_argument('--sat_id', dest='sat_id', action="store", help="Satellite ID", default="8888") -parser.add_argument('--data_cat', dest='data_cat', action="store", +parser.add_argument("--chan_id", dest="chan_id", action="store", help="Channel ID", default="9999") +parser.add_argument("--sat_id", dest="sat_id", action="store", help="Satellite ID", default="8888") +parser.add_argument("--data_cat", dest="data_cat", action="store", help="Category of data (one of GORN, GPRN, PORN)", default="GORN") -parser.add_argument('--area', dest='areadef', action="store", +parser.add_argument("--area", dest="areadef", action="store", help="Area name, the definition must exist in your areas configuration file", default="nrEURO1km_NPOL_COALeqc") -parser.add_argument('--ph_unit', dest='ph_unit', action="store", help="Physical unit", default="CELSIUS") -parser.add_argument('--data_src', dest='data_src', action="store", help="Data source", default="EUMETCAST") +parser.add_argument("--ph_unit", dest="ph_unit", action="store", help="Physical unit", default="CELSIUS") +parser.add_argument("--data_src", dest="data_src", action="store", help="Data source", default="EUMETCAST") args = parser.parse_args() if (args.input_dir is not None): @@ -59,21 +59,21 @@ cfg = vars(args) if (args.cfg is not None): - with open(args.cfg, 'r') as ymlfile: + with open(args.cfg, "r") as ymlfile: cfg = yaml.load(ymlfile, Loader=UnsafeLoader) narea = get_area_def(args.areadef) global_data = Scene(reader="generic_image") -global_data.load(['image']) +global_data.load(["image"]) -global_data['image'].info['area'] = narea -fname = global_data['image'].info['filename'] +global_data["image"].info["area"] = narea +fname = global_data["image"].info["filename"] ofname = fname[:-3] + "tif" # global_data.save_dataset('image', filename="out.png", writer="simple_image") -global_data.save_dataset('image', filename=ofname, writer="ninjotiff", - sat_id=cfg['sat_id'], - chan_id=cfg['chan_id'], - data_cat=cfg['data_cat'], - data_source=cfg['data_src'], - physic_unit=cfg['ph_unit']) +global_data.save_dataset("image", filename=ofname, writer="ninjotiff", + sat_id=cfg["sat_id"], + chan_id=cfg["chan_id"], + data_cat=cfg["data_cat"], + data_source=cfg["data_src"], + physic_unit=cfg["ph_unit"]) diff --git a/utils/coord2area_def.py b/utils/coord2area_def.py index e3727b9aba..8b6aa0478b 100644 --- a/utils/coord2area_def.py +++ b/utils/coord2area_def.py @@ -66,7 +66,7 @@ from pyproj import Proj -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("name", @@ -126,7 +126,7 @@ " +".join(("proj=" + proj + ",lat_0=" + str(lat_0) + ",lon_0=" + str(lon_0) + ",ellps=WGS84").split(",")) - print('### ' + proj4_string) + print("### " + proj4_string) print() print(name + ":") print(" description: " + name) @@ -146,14 +146,14 @@ sys.exit(0) 
from PIL import Image from pycoast import ContourWriterAGG - img = Image.new('RGB', (xsize, ysize)) + img = Image.new("RGB", (xsize, ysize)) area_def = (proj4_string, area_extent) cw = ContourWriterAGG(args.shapes) cw.add_coastlines(img, (proj4_string, area_extent), - resolution='l', width=0.5) + resolution="l", width=0.5) - cw.add_grid(img, area_def, (10.0, 10.0), (2.0, 2.0), write_text=False, outline='white', outline_opacity=175, - width=1.0, minor_outline='white', minor_outline_opacity=175, minor_width=0.2, minor_is_tick=False) + cw.add_grid(img, area_def, (10.0, 10.0), (2.0, 2.0), write_text=False, outline="white", outline_opacity=175, + width=1.0, minor_outline="white", minor_outline_opacity=175, minor_width=0.2, minor_is_tick=False) img.show() diff --git a/utils/fetch_avhrr_calcoeffs.py b/utils/fetch_avhrr_calcoeffs.py index 7bc49ba8db..f73975df95 100644 --- a/utils/fetch_avhrr_calcoeffs.py +++ b/utils/fetch_avhrr_calcoeffs.py @@ -53,32 +53,32 @@ def get_page(url): """Retrieve the given page.""" - return urllib2.urlopen(url).read() + return urllib2.urlopen(url).read() # nosec def get_coeffs(page): """Parse coefficients from the page.""" coeffs = {} - coeffs['datetime'] = [] - coeffs['slope1'] = [] - coeffs['intercept1'] = [] - coeffs['slope2'] = [] - coeffs['intercept2'] = [] + coeffs["datetime"] = [] + coeffs["slope1"] = [] + coeffs["intercept1"] = [] + coeffs["slope2"] = [] + coeffs["intercept2"] = [] slope1_idx, intercept1_idx, slope2_idx, intercept2_idx = \ None, None, None, None date_idx = 0 - for row in page.lower().split('\n'): + for row in page.lower().split("\n"): row = row.split() if len(row) == 0: continue - if row[0] == 'update': + if row[0] == "update": # Get the column indices from the header line - slope1_idx = row.index('slope_lo') - intercept1_idx = row.index('int_lo') - slope2_idx = row.index('slope_hi') - intercept2_idx = row.index('int_hi') + slope1_idx = row.index("slope_lo") + intercept1_idx = row.index("int_lo") + slope2_idx = row.index("slope_hi") + intercept2_idx = row.index("int_hi") continue if slope1_idx is None: @@ -94,11 +94,11 @@ def get_coeffs(page): except ValueError: continue - coeffs['datetime'].append([dat.year, dat.month, dat.day]) - coeffs['slope1'].append(float(row[slope1_idx])) - coeffs['intercept1'].append(float(row[intercept1_idx])) - coeffs['slope2'].append(float(row[slope2_idx])) - coeffs['intercept2'].append(float(row[intercept2_idx])) + coeffs["datetime"].append([dat.year, dat.month, dat.day]) + coeffs["slope1"].append(float(row[slope1_idx])) + coeffs["intercept1"].append(float(row[intercept1_idx])) + coeffs["slope2"].append(float(row[slope2_idx])) + coeffs["intercept2"].append(float(row[intercept2_idx])) return coeffs @@ -119,19 +119,19 @@ def get_all_coeffs(): return coeffs -def save_coeffs(coeffs, out_dir=''): +def save_coeffs(coeffs, out_dir=""): """Save calibration coefficients to HDF5 files.""" for platform in coeffs.keys(): fname = os.path.join(out_dir, "%s_calibration_data.h5" % platform) - fid = h5py.File(fname, 'w') + fid = h5py.File(fname, "w") for chan in coeffs[platform].keys(): fid.create_group(chan) - fid[chan]['datetime'] = coeffs[platform][chan]['datetime'] - fid[chan]['slope1'] = coeffs[platform][chan]['slope1'] - fid[chan]['intercept1'] = coeffs[platform][chan]['intercept1'] - fid[chan]['slope2'] = coeffs[platform][chan]['slope2'] - fid[chan]['intercept2'] = coeffs[platform][chan]['intercept2'] + fid[chan]["datetime"] = coeffs[platform][chan]["datetime"] + fid[chan]["slope1"] = coeffs[platform][chan]["slope1"] + 
fid[chan]["intercept1"] = coeffs[platform][chan]["intercept1"] + fid[chan]["slope2"] = coeffs[platform][chan]["slope2"] + fid[chan]["intercept2"] = coeffs[platform][chan]["intercept2"] fid.close() print("Calibration coefficients saved for %s" % platform)