diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml
index 52cad8a5ce..b3ec45501c 100644
--- a/satpy/etc/enhancements/generic.yaml
+++ b/satpy/etc/enhancements/generic.yaml
@@ -271,6 +271,29 @@ enhancements:
stretch: linear
cutoffs: [0.005, 0.005]
+ four_level_cloud_mask:
+ standard_name: cloud_mask
+ reader: clavrx
+ operations:
+ - name: palettize
+ method: !!python/name:satpy.enhancements.palettize
+ kwargs:
+ palettes:
+ - {'values': [-127, # Fill Value
+ 0, # Clear
+ 1, # Probably Clear
+ 2, # Probably Cloudy
+ 3, # Cloudy
+ ],
+ 'colors': [[0, 0, 0], # black,-127 = Fill Value
+ [94, 79, 162], # blue, 0 = Clear
+ [73, 228, 242], # cyan, 1 = Probably Clear
+ [158, 1, 66], # red, 2 = Probably Cloudy
+ [255, 255, 255], # white, 3 = Cloudy
+ ],
+ 'color_scale': 255,
+ }
+
sar-ice:
standard_name: sar-ice
operations:
diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py
index 4303456c04..c355a1f0ba 100644
--- a/satpy/readers/clavrx.py
+++ b/satpy/readers/clavrx.py
@@ -17,10 +17,12 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Interface to CLAVR-X HDF4 products."""
+from __future__ import annotations
+
import logging
import os
from glob import glob
-from typing import Optional, Union
+from typing import Optional
import netCDF4
import numpy as np
@@ -69,6 +71,20 @@
"abi": 2004,
}
+CHANNEL_ALIASES = {
+ "abi": {"refl_0_47um_nom": {"name": "C01", "wavelength": 0.47, "modifiers": ("sunz_corrected",)},
+ "refl_0_65um_nom": {"name": "C02", "wavelength": 0.64, "modifiers": ("sunz_corrected",)},
+ "refl_0_86um_nom": {"name": "C03", "wavelength": 0.865, "modifiers": ("sunz_corrected",)},
+ "refl_1_38um_nom": {"name": "C04", "wavelength": 1.38, "modifiers": ("sunz_corrected",)},
+ "refl_1_60um_nom": {"name": "C05", "wavelength": 1.61, "modifiers": ("sunz_corrected",)},
+ "refl_2_10um_nom": {"name": "C06", "wavelength": 2.25, "modifiers": ("sunz_corrected",)},
+ },
+ "viirs": {"refl_0_65um_nom": {"name": "I01", "wavelength": 0.64, "modifiers": ("sunz_corrected",)},
+ "refl_1_38um_nom": {"name": "M09", "wavelength": 1.38, "modifiers": ("sunz_corrected",)},
+ "refl_1_60um_nom": {"name": "I03", "wavelength": 1.61, "modifiers": ("sunz_corrected",)}
+ }
+}
+
def _get_sensor(sensor: str) -> str:
"""Get the sensor."""
@@ -94,9 +110,30 @@ def _get_rows_per_scan(sensor: str) -> Optional[int]:
return None
+def _scale_data(data_arr: xr.DataArray | int, scale_factor: float, add_offset: float) -> xr.DataArray:
+ """Scale data, if needed."""
+ scaling_needed = not (scale_factor == 1.0 and add_offset == 0.0)
+ if scaling_needed:
+ data_arr = data_arr * np.float32(scale_factor) + np.float32(add_offset)
+ return data_arr
+
+
class _CLAVRxHelper:
"""A base class for the CLAVRx File Handlers."""
+ @staticmethod
+ def _get_nadir_resolution(sensor, filename_info_resolution):
+ """Get nadir resolution."""
+ for k, v in NADIR_RESOLUTION.items():
+ if sensor.startswith(k):
+ return v
+ if filename_info_resolution is None:
+ return None
+ if isinstance(filename_info_resolution, str) and filename_info_resolution.startswith("m"):
+ return int(filename_info_resolution[:-1])
+ else:
+ return int(filename_info_resolution)
+
@staticmethod
def _remove_attributes(attrs: dict) -> dict:
"""Remove attributes that described data before scaling."""
@@ -107,14 +144,6 @@ def _remove_attributes(attrs: dict) -> dict:
attrs.pop(attr_key, None)
return attrs
- @staticmethod
- def _scale_data(data_arr: Union[xr.DataArray, int], scale_factor: float, add_offset: float) -> xr.DataArray:
- """Scale data, if needed."""
- scaling_needed = not (scale_factor == 1.0 and add_offset == 0.0)
- if scaling_needed:
- data_arr = data_arr * scale_factor + add_offset
- return data_arr
-
@staticmethod
def _get_data(data, dataset_id: dict) -> xr.DataArray:
"""Get a dataset."""
@@ -123,28 +152,31 @@ def _get_data(data, dataset_id: dict) -> xr.DataArray:
attrs = data.attrs.copy()
- fill = attrs.get("_FillValue")
+ # don't need these attributes after applied.
factor = attrs.pop("scale_factor", (np.ones(1, dtype=data.dtype))[0])
offset = attrs.pop("add_offset", (np.zeros(1, dtype=data.dtype))[0])
+ flag_values = data.attrs.get("flag_values", [None])
valid_range = attrs.get("valid_range", [None])
- if isinstance(valid_range, np.ndarray):
- attrs["valid_range"] = valid_range.tolist()
- flags = not data.attrs.get("SCALED", 1) and any(data.attrs.get("flag_values", [None]))
- if not flags:
+ if isinstance(valid_range, np.ndarray):
+ valid_range = valid_range.tolist()
+ attrs["valid_range"] = valid_range
+
+ flags = not data.attrs.get("SCALED", 1) and any(flag_values)
+ if flags:
+ fill = attrs.get("_FillValue", None)
+ if isinstance(flag_values, np.ndarray) or isinstance(flag_values, list):
+ data = data.where((data >= flag_values[0]) & (data <= flag_values[-1]), fill)
+ else:
+ fill = attrs.pop("_FillValue", None)
data = data.where(data != fill)
- data = _CLAVRxHelper._scale_data(data, factor, offset)
- # don't need _FillValue if it has been applied.
- attrs.pop("_FillValue", None)
-
- if all(valid_range):
- valid_min = _CLAVRxHelper._scale_data(valid_range[0], factor, offset)
- valid_max = _CLAVRxHelper._scale_data(valid_range[1], factor, offset)
- if flags:
- data = data.where((data >= valid_min) & (data <= valid_max), fill)
- else:
+ data = _scale_data(data, factor, offset)
+
+ if valid_range[0] is not None:
+ valid_min = _scale_data(valid_range[0], factor, offset)
+ valid_max = _scale_data(valid_range[1], factor, offset)
data = data.where((data >= valid_min) & (data <= valid_max))
- attrs["valid_range"] = [valid_min, valid_max]
+ attrs["valid_range"] = [valid_min, valid_max]
data.attrs = _CLAVRxHelper._remove_attributes(attrs)
@@ -183,23 +215,27 @@ def _read_pug_fixed_grid(projection_coordinates: netCDF4.Variable, distance_mult
return proj_dict
@staticmethod
- def _find_input_nc(filename: str, l1b_base: str) -> str:
+ def _find_input_nc(filename: str, sensor: str, l1b_base: str) -> str:
dirname = os.path.dirname(filename)
l1b_filename = os.path.join(dirname, l1b_base + ".nc")
if os.path.exists(l1b_filename):
return str(l1b_filename)
- glob_pat = os.path.join(dirname, l1b_base + "*R20*.nc")
+ if sensor == "AHI":
+ glob_pat = os.path.join(dirname, l1b_base + "*R20*.nc")
+ else:
+ glob_pat = os.path.join(dirname, l1b_base + "*.nc")
+
LOG.debug("searching for {0}".format(glob_pat))
found_l1b_filenames = list(glob(glob_pat))
if len(found_l1b_filenames) == 0:
- raise IOError("Could not find navigation donor for {0}"
- " in same directory as CLAVR-x data".format(l1b_base))
+ fp = os.path.join(dirname, l1b_base)
+ raise IOError(f"Missing navigation donor {fp}")
LOG.debug("Candidate nav donors: {0}".format(repr(found_l1b_filenames)))
return found_l1b_filenames[0]
@staticmethod
- def _read_axi_fixed_grid(filename: str, l1b_attr) -> geometry.AreaDefinition:
+ def _read_axi_fixed_grid(filename: str, sensor: str, l1b_attr) -> geometry.AreaDefinition:
"""Read a fixed grid.
CLAVR-x does not transcribe fixed grid parameters to its output
@@ -214,7 +250,7 @@ def _read_axi_fixed_grid(filename: str, l1b_attr) -> geometry.AreaDefinition:
"""
LOG.debug(f"looking for corresponding input file for {l1b_attr}"
" to act as fixed grid navigation donor")
- l1b_path = _CLAVRxHelper._find_input_nc(filename, l1b_attr)
+ l1b_path = _CLAVRxHelper._find_input_nc(filename, sensor, l1b_attr)
LOG.info(f"CLAVR-x does not include fixed-grid parameters, use input file {l1b_path} as donor")
l1b = netCDF4.Dataset(l1b_path)
proj = None
@@ -236,13 +272,13 @@ def _read_axi_fixed_grid(filename: str, l1b_attr) -> geometry.AreaDefinition:
area_extent, ncols, nlines = _CLAVRxHelper._area_extent(x, y, h)
area = geometry.AreaDefinition(
- "ahi_geos",
- "AHI L2 file area",
- "ahi_geos",
- proj,
- ncols,
- nlines,
- np.asarray(area_extent))
+ f"{sensor}_geos",
+ f"{sensor.upper()} L2 file area",
+ f"{sensor}_geos",
+ proj,
+ ncols,
+ nlines,
+ area_extent)
return area
@@ -284,6 +320,11 @@ def __init__(self, filename, filename_info, filetype_info):
filename_info,
filetype_info)
+ self.sensor = _get_sensor(self.file_content.get("/attr/sensor"))
+ self.platform = _get_platform(self.file_content.get("/attr/platform"))
+ self.resolution = _CLAVRxHelper._get_nadir_resolution(self.sensor,
+ self.filename_info.get("resolution"))
+
@property
def start_time(self):
"""Get the start time."""
@@ -295,7 +336,7 @@ def end_time(self):
return self.filename_info.get("end_time", self.start_time)
def get_dataset(self, dataset_id, ds_info):
- """Get a dataset."""
+ """Get a dataset for Polar Sensors."""
var_name = ds_info.get("file_key", dataset_id["name"])
data = self[var_name]
data = _CLAVRxHelper._get_data(data, dataset_id)
@@ -303,61 +344,61 @@ def get_dataset(self, dataset_id, ds_info):
data.attrs, ds_info)
return data
- def get_nadir_resolution(self, sensor):
- """Get nadir resolution."""
- for k, v in NADIR_RESOLUTION.items():
- if sensor.startswith(k):
- return v
- res = self.filename_info.get("resolution")
- if res.endswith("m"):
- return int(res[:-1])
- elif res is not None:
- return int(res)
+ def _available_aliases(self, ds_info, current_var):
+ """Add alias if there is a match."""
+ new_info = ds_info.copy()
+ alias_info = CHANNEL_ALIASES.get(self.sensor).get(current_var, None)
+ if alias_info is not None:
+ alias_info.update({"file_key": current_var})
+ new_info.update(alias_info)
+ yield True, new_info
def available_datasets(self, configured_datasets=None):
- """Automatically determine datasets provided by this file."""
- self.sensor = _get_sensor(self.file_content.get("/attr/sensor"))
- self.platform = _get_platform(self.file_content.get("/attr/platform"))
-
- nadir_resolution = self.get_nadir_resolution(self.sensor)
- coordinates = ("longitude", "latitude")
+ """Add more information if this reader can provide it."""
handled_variables = set()
-
- # update previously configured datasets
for is_avail, ds_info in (configured_datasets or []):
- this_res = ds_info.get("resolution")
- this_coords = ds_info.get("coordinates")
# some other file handler knows how to load this
if is_avail is not None:
yield is_avail, ds_info
+ new_info = ds_info.copy() # don't change input
+ this_res = ds_info.get("resolution")
var_name = ds_info.get("file_key", ds_info["name"])
matches = self.file_type_matches(ds_info["file_type"])
# we can confidently say that we can provide this dataset and can
# provide more info
- if matches and var_name in self and this_res != nadir_resolution:
+ if matches and var_name in self and this_res != self.resolution:
handled_variables.add(var_name)
- new_info = ds_info.copy() # don't mess up the above yielded
- new_info["resolution"] = nadir_resolution
- if self._is_polar() and this_coords is None:
- new_info["coordinates"] = coordinates
+ new_info["resolution"] = self.resolution
+ if self._is_polar():
+ new_info["coordinates"] = ds_info.get("coordinates", ("longitude", "latitude"))
yield True, new_info
elif is_avail is None:
# if we didn't know how to handle this dataset and no one else did
# then we should keep it going down the chain
yield is_avail, ds_info
- # add new datasets
+ # get data from file dynamically
+ yield from self._dynamic_datasets()
+
+ def _dynamic_datasets(self):
+ """Get data from file and build aliases."""
for var_name, val in self.file_content.items():
if isinstance(val, SDS):
ds_info = {
"file_type": self.filetype_info["file_type"],
- "resolution": nadir_resolution,
+ "resolution": self.resolution,
"name": var_name,
}
if self._is_polar():
ds_info["coordinates"] = ["longitude", "latitude"]
+
+ # always yield what we have
yield True, ds_info
+ if CHANNEL_ALIASES.get(self.sensor) is not None:
+ # yield variable as it is
+ # yield any associated aliases
+ yield from self._available_aliases(ds_info, var_name)
def get_shape(self, dataset_id, ds_info):
"""Get the shape."""
@@ -376,7 +417,7 @@ def get_area_def(self, key):
return super(CLAVRXHDF4FileHandler, self).get_area_def(key)
l1b_att = str(self.file_content.get("/attr/L1B", None))
- area_def = _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att)
+ area_def = _CLAVRxHelper._read_axi_fixed_grid(self.filename, self.sensor, l1b_att)
return area_def
@@ -402,25 +443,37 @@ def __init__(self, filename, filename_info, filetype_info):
self.platform = _get_platform(
self.filename_info.get("platform_shortname", None))
self.sensor = _get_sensor(self.nc.attrs.get("sensor", None))
+ self.resolution = _CLAVRxHelper._get_nadir_resolution(self.sensor,
+ self.filename_info.get("resolution"))
# coordinates need scaling and valid_range (mask_and_scale won't work on valid_range)
self.nc.coords["latitude"] = _CLAVRxHelper._get_data(self.nc.coords["latitude"],
{"name": "latitude"})
self.nc.coords["longitude"] = _CLAVRxHelper._get_data(self.nc.coords["longitude"],
{"name": "longitude"})
- def _get_ds_info_for_data_arr(self, var_name):
+ def _dynamic_dataset_info(self, var_name):
+ """Set data name and, if applicable, aliases."""
ds_info = {
"file_type": self.filetype_info["file_type"],
"name": var_name,
}
- return ds_info
+ yield True, ds_info
+
+ if CHANNEL_ALIASES.get(self.sensor) is not None:
+ alias_info = ds_info.copy()
+ channel_info = CHANNEL_ALIASES.get(self.sensor).get(var_name, None)
+ if channel_info is not None:
+ channel_info["file_key"] = var_name
+ alias_info.update(channel_info)
+ yield True, alias_info
- def _is_2d_yx_data_array(self, data_arr):
+ @staticmethod
+ def _is_2d_yx_data_array(data_arr):
has_y_dim = data_arr.dims[0] == "y"
has_x_dim = data_arr.dims[1] == "x"
return has_y_dim and has_x_dim
- def _available_new_datasets(self, handled_vars):
+ def _available_file_datasets(self, handled_vars):
"""Metadata for available variables other than BT."""
possible_vars = list(self.nc.items()) + list(self.nc.coords.items())
for var_name, data_arr in possible_vars:
@@ -433,8 +486,7 @@ def _available_new_datasets(self, handled_vars):
# we need 'traditional' y/x dimensions currently
continue
- ds_info = self._get_ds_info_for_data_arr(var_name)
- yield True, ds_info
+ yield from self._dynamic_dataset_info(var_name)
def available_datasets(self, configured_datasets=None):
"""Dynamically discover what variables can be loaded from this file.
@@ -450,17 +502,23 @@ def available_datasets(self, configured_datasets=None):
# we don't know any more information than the previous
# file handler so let's yield early
yield is_avail, ds_info
- continue
- if self.file_type_matches(ds_info["file_type"]):
+
+ matches = self.file_type_matches(ds_info["file_type"])
+ if matches and ds_info.get("resolution") != self.resolution:
+ # reader knows something about this dataset (file type matches)
+ # add any information that this reader can add.
+ new_info = ds_info.copy()
+ if self.resolution is not None:
+ new_info["resolution"] = self.resolution
handled_vars.add(ds_info["name"])
- yield self.file_type_matches(ds_info["file_type"]), ds_info
- yield from self._available_new_datasets(handled_vars)
+ yield True, new_info
+ yield from self._available_file_datasets(handled_vars)
def _is_polar(self):
l1b_att, inst_att = (str(self.nc.attrs.get("L1B", None)),
str(self.nc.attrs.get("sensor", None)))
- return (inst_att != "AHI" and "GOES" not in inst_att) or (l1b_att is None)
+ return (inst_att not in ["ABI", "AHI"] and "GOES" not in inst_att) or (l1b_att is None)
def get_area_def(self, key):
"""Get the area definition of the data at hand."""
@@ -468,11 +526,11 @@ def get_area_def(self, key):
return super(CLAVRXNetCDFFileHandler, self).get_area_def(key)
l1b_att = str(self.nc.attrs.get("L1B", None))
- return _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att)
+ return _CLAVRxHelper._read_axi_fixed_grid(self.filename, self.sensor, l1b_att)
def get_dataset(self, dataset_id, ds_info):
- """Get a dataset."""
- var_name = ds_info.get("name", dataset_id["name"])
+ """Get a dataset for supported geostationary sensors."""
+ var_name = ds_info.get("file_key", dataset_id["name"])
data = self[var_name]
data = _CLAVRxHelper._get_data(data, dataset_id)
data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform,
@@ -481,5 +539,6 @@ def get_dataset(self, dataset_id, ds_info):
def __getitem__(self, item):
"""Wrap around `self.nc[item]`."""
+ # Check if "item" is an alias:
data = self.nc[item]
return data
diff --git a/satpy/tests/reader_tests/test_clavrx/__init__.py b/satpy/tests/reader_tests/test_clavrx/__init__.py
new file mode 100644
index 0000000000..6f62e3a26b
--- /dev/null
+++ b/satpy/tests/reader_tests/test_clavrx/__init__.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017-2018 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""The clavrx reader tests package."""
diff --git a/satpy/tests/reader_tests/test_clavrx/test_clavrx_geohdf.py b/satpy/tests/reader_tests/test_clavrx/test_clavrx_geohdf.py
new file mode 100644
index 0000000000..85a7f6faa3
--- /dev/null
+++ b/satpy/tests/reader_tests/test_clavrx/test_clavrx_geohdf.py
@@ -0,0 +1,246 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2018 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Module for testing the satpy.readers.clavrx module."""
+
+import os
+import unittest
+from unittest import mock
+
+import numpy as np
+import pytest
+import xarray as xr
+from pyresample.geometry import AreaDefinition
+
+from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler
+
+DEFAULT_FILE_DTYPE = np.uint16
+DEFAULT_FILE_SHAPE = (10, 300)
+DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1],
+ dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE)
+DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32)
+DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
+DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
+DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
+DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
+
+class FakeHDF4FileHandlerGeo(FakeHDF4FileHandler):
+ """Swap-in HDF4 File Handler."""
+
+ def get_test_content(self, filename, filename_info, filetype_info):
+ """Mimic reader input file content."""
+ file_content = {
+ "/attr/platform": "HIM8",
+ "/attr/sensor": "AHI",
+ # this is a Level 2 file that came from a L1B file
+ "/attr/L1B": "clavrx_H08_20180806_1800",
+ }
+
+ file_content["longitude"] = xr.DataArray(
+ DEFAULT_LON_DATA,
+ dims=("y", "x"),
+ attrs={
+ "_FillValue": np.nan,
+ "scale_factor": 1.,
+ "add_offset": 0.,
+ "standard_name": "longitude",
+ })
+ file_content["longitude/shape"] = DEFAULT_FILE_SHAPE
+
+ file_content["latitude"] = xr.DataArray(
+ DEFAULT_LAT_DATA,
+ dims=("y", "x"),
+ attrs={
+ "_FillValue": np.nan,
+ "scale_factor": 1.,
+ "add_offset": 0.,
+ "standard_name": "latitude",
+ })
+ file_content["latitude/shape"] = DEFAULT_FILE_SHAPE
+
+ file_content["refl_1_38um_nom"] = xr.DataArray(
+ DEFAULT_FILE_DATA.astype(np.float32),
+ dims=("y", "x"),
+ attrs={
+ "SCALED": 1,
+ "add_offset": 59.0,
+ "scale_factor": 0.0018616290763020515,
+ "units": "%",
+ "_FillValue": -32768,
+ "valid_range": [-32767, 32767],
+ "actual_range": [-2., 120.],
+ "actual_missing": -999.0
+ })
+ file_content["refl_1_38um_nom/shape"] = DEFAULT_FILE_SHAPE
+
+ # data with fill values
+ file_content["variable2"] = xr.DataArray(
+ DEFAULT_FILE_DATA.astype(np.float32),
+ dims=("y", "x"),
+ attrs={
+ "_FillValue": -1,
+ "scale_factor": 1.,
+ "add_offset": 0.,
+ "units": "1",
+ })
+ file_content["variable2/shape"] = DEFAULT_FILE_SHAPE
+ file_content["variable2"] = file_content["variable2"].where(
+ file_content["variable2"] % 2 != 0)
+
+ # category
+ file_content["variable3"] = xr.DataArray(
+ DEFAULT_FILE_DATA.astype(np.byte),
+ dims=("y", "x"),
+ attrs={
+ "SCALED": 0,
+ "_FillValue": -128,
+ "flag_meanings": "clear water supercooled mixed ice unknown",
+ "flag_values": [0, 1, 2, 3, 4, 5],
+ "units": "1",
+ })
+ file_content["variable3/shape"] = DEFAULT_FILE_SHAPE
+
+ return file_content
+
+
+class TestCLAVRXReaderGeo(unittest.TestCase):
+ """Test CLAVR-X Reader with Geo files."""
+
+ yaml_file = "clavrx.yaml"
+
+ def setUp(self):
+ """Wrap HDF4 file handler with our own fake handler."""
+ from satpy._config import config_search_paths
+ from satpy.readers.clavrx import CLAVRXHDF4FileHandler
+ self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file))
+ # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
+ self.p = mock.patch.object(CLAVRXHDF4FileHandler, "__bases__", (FakeHDF4FileHandlerGeo,))
+ self.fake_handler = self.p.start()
+ self.p.is_local = True
+
+ def tearDown(self):
+ """Stop wrapping the NetCDF4 file handler."""
+ self.p.stop()
+
+ def test_init(self):
+ """Test basic init with no extra parameters."""
+ from satpy.readers import load_reader
+ r = load_reader(self.reader_configs)
+ loadables = r.select_files_from_pathnames([
+ "clavrx_H08_20180806_1800.level2.hdf",
+ ])
+ assert len(loadables) == 1
+ r.create_filehandlers(loadables)
+ # make sure we have some files
+ assert r.file_handlers
+
+ def test_no_nav_donor(self):
+ """Test exception raised when no donor file is available."""
+ import xarray as xr
+
+ from satpy.readers import load_reader
+ r = load_reader(self.reader_configs)
+ fake_fn = "clavrx_H08_20180806_1800.level2.hdf"
+ with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray):
+ loadables = r.select_files_from_pathnames([fake_fn])
+ r.create_filehandlers(loadables)
+ l1b_base = fake_fn.split(".")[0]
+ msg = f"Missing navigation donor {l1b_base}"
+ with pytest.raises(IOError, match=msg):
+ r.load(["refl_1_38um_nom", "variable2", "variable3"])
+
+ def test_load_all_old_donor(self):
+ """Test loading all test datasets with old donor."""
+ import xarray as xr
+
+ from satpy.readers import load_reader
+ r = load_reader(self.reader_configs)
+ with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray):
+ loadables = r.select_files_from_pathnames([
+ "clavrx_H08_20180806_1800.level2.hdf",
+ ])
+ r.create_filehandlers(loadables)
+ with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d:
+ g.return_value = ["fake_donor.nc"]
+ x = np.linspace(-0.1518, 0.1518, 300)
+ y = np.linspace(0.1518, -0.1518, 10)
+ proj = mock.Mock(
+ semi_major_axis=6378.137,
+ semi_minor_axis=6356.7523142,
+ perspective_point_height=35791,
+ longitude_of_projection_origin=140.7,
+ sweep_angle_axis="y",
+ )
+ d.return_value = fake_donor = mock.MagicMock(
+ variables={"Projection": proj, "x": x, "y": y},
+ )
+ fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key]
+ datasets = r.load(["refl_1_38um_nom", "variable2", "variable3"])
+ assert len(datasets) == 3
+ for v in datasets.values():
+ assert "calibration" not in v.attrs
+ assert v.attrs["units"] in ["1", "%"]
+ assert isinstance(v.attrs["area"], AreaDefinition)
+ if v.attrs.get("flag_values"):
+ assert "_FillValue" in v.attrs
+ else:
+ assert "_FillValue" not in v.attrs
+ if v.attrs["name"] == "refl_1_38um_nom":
+ assert "valid_range" in v.attrs
+ assert isinstance(v.attrs["valid_range"], list)
+ else:
+ assert "valid_range" not in v.attrs
+ if "flag_values" in v.attrs:
+ assert np.issubdtype(v.dtype, np.integer)
+ assert v.attrs.get("flag_meanings") is not None
+
+ def test_load_all_new_donor(self):
+ """Test loading all test datasets with new donor."""
+ import xarray as xr
+
+ from satpy.readers import load_reader
+ r = load_reader(self.reader_configs)
+ with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray):
+ loadables = r.select_files_from_pathnames([
+ "clavrx_H08_20180806_1800.level2.hdf",
+ ])
+ r.create_filehandlers(loadables)
+ with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d:
+ g.return_value = ["fake_donor.nc"]
+ x = np.linspace(-0.1518, 0.1518, 300)
+ y = np.linspace(0.1518, -0.1518, 10)
+ proj = mock.Mock(
+ semi_major_axis=6378137,
+ semi_minor_axis=6356752.3142,
+ perspective_point_height=35791000,
+ longitude_of_projection_origin=140.7,
+ sweep_angle_axis="y",
+ )
+ d.return_value = fake_donor = mock.MagicMock(
+ variables={"goes_imager_projection": proj, "x": x, "y": y},
+ )
+ fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key]
+ datasets = r.load(["refl_1_38um_nom", "variable2", "variable3"])
+ assert len(datasets) == 3
+ for v in datasets.values():
+ assert "calibration" not in v.attrs
+ assert v.attrs["units"] in ["1", "%"]
+ assert isinstance(v.attrs["area"], AreaDefinition)
+ assert v.attrs["area"].is_geostationary is True
+ assert v.attrs["platform_name"] == "himawari8"
+ assert v.attrs["sensor"] == "ahi"
+ assert datasets["variable3"].attrs.get("flag_meanings") is not None
diff --git a/satpy/tests/reader_tests/test_clavrx/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx/test_clavrx_nc.py
new file mode 100644
index 0000000000..3cb188d76c
--- /dev/null
+++ b/satpy/tests/reader_tests/test_clavrx/test_clavrx_nc.py
@@ -0,0 +1,326 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2018 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Module for testing the satpy.readers.clavrx module."""
+import os
+from unittest import mock
+
+import numpy as np
+import pytest
+import xarray as xr
+from pyresample.geometry import AreaDefinition
+
+from satpy.readers import load_reader
+
+ABI_FILE = "clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173.level2.nc"
+DEFAULT_FILE_DTYPE = np.uint16
+DEFAULT_FILE_SHAPE = (5, 5)
+DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1],
+ dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE)
+DEFAULT_FILE_FLAGS = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1],
+ dtype=np.byte).reshape(DEFAULT_FILE_SHAPE)
+DEFAULT_FILE_FLAGS_BEYOND_FILL = DEFAULT_FILE_FLAGS
+DEFAULT_FILE_FLAGS_BEYOND_FILL[-1][:-2] = [-127, -127, -128]
+DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32)
+DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
+DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
+DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
+DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
+L1B_FILE = "clavrx_OR_ABI-L1b-RadC-M6C01_G16_s20231021601173"
+ABI_FILE = f"{L1B_FILE}.level2.nc"
+FILL_VALUE = -32768
+
+
+def fake_test_content(filename, **kwargs):
+ """Mimic reader input file content."""
+ attrs = {
+ "platform": "G16",
+ "sensor": "ABI",
+ # this is a Level 2 file that came from a L1B file
+ "L1B": L1B_FILE,
+ }
+
+ longitude = xr.DataArray(DEFAULT_LON_DATA,
+ dims=("scan_lines_along_track_direction",
+ "pixel_elements_along_scan_direction"),
+ attrs={"_FillValue": -999.,
+ "SCALED": 0,
+ "scale_factor": 1.,
+ "add_offset": 0.,
+ "standard_name": "longitude",
+ "units": "degrees_east"
+ })
+
+ latitude = xr.DataArray(DEFAULT_LAT_DATA,
+ dims=("scan_lines_along_track_direction",
+ "pixel_elements_along_scan_direction"),
+ attrs={"_FillValue": -999.,
+ "SCALED": 0,
+ "scale_factor": 1.,
+ "add_offset": 0.,
+ "standard_name": "latitude",
+ "units": "degrees_south"
+ })
+
+ variable1 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.int8),
+ dims=("scan_lines_along_track_direction",
+ "pixel_elements_along_scan_direction"),
+ attrs={"_FillValue": -127,
+ "SCALED": 0,
+ "units": "1",
+ })
+
+ # data with fill values and a file_type alias
+ variable2 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.int16),
+ dims=("scan_lines_along_track_direction",
+ "pixel_elements_along_scan_direction"),
+ attrs={"_FillValue": FILL_VALUE,
+ "SCALED": 1,
+ "scale_factor": 0.001861629,
+ "add_offset": 59.,
+ "units": "%",
+ "valid_range": [-32767, 32767],
+ # this is a Level 2 file that came from a L1B file
+ "L1B": "clavrx_H08_20210603_1500_B01_FLDK_R",
+ }
+ )
+ variable2 = variable2.where(variable2 % 2 != 0, FILL_VALUE)
+
+ # category
+ var_flags = xr.DataArray(DEFAULT_FILE_FLAGS.astype(np.int8),
+ dims=("scan_lines_along_track_direction",
+ "pixel_elements_along_scan_direction"),
+ attrs={"SCALED": 0,
+ "_FillValue": -127,
+ "units": "1",
+ "flag_values": [0, 1, 2, 3]})
+
+ out_of_range_flags = xr.DataArray(DEFAULT_FILE_FLAGS_BEYOND_FILL.astype(np.int8),
+ dims=("scan_lines_along_track_direction",
+ "pixel_elements_along_scan_direction"),
+ attrs={"SCALED": 0,
+ "_FillValue": -127,
+ "units": "1",
+ "flag_values": [0, 1, 2, 3]})
+
+ ds_vars = {
+ "longitude": longitude,
+ "latitude": latitude,
+ "variable1": variable1,
+ "refl_0_65um_nom": variable2,
+ "var_flags": var_flags,
+ "out_of_range_flags": out_of_range_flags,
+ }
+ ds = xr.Dataset(ds_vars, attrs=attrs)
+ ds = ds.assign_coords({"latitude": latitude, "longitude": longitude})
+
+ return ds
+
+
+class TestCLAVRXReaderGeo:
+ """Test CLAVR-X Reader with Geo files."""
+
+ yaml_file = "clavrx.yaml"
+
+ def setup_method(self):
+ """Read fake data."""
+ from satpy._config import config_search_paths
+ self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file))
+
+ @pytest.mark.parametrize(
+ ("filenames", "expected_loadables"),
+ [([ABI_FILE], 1)]
+ )
+ def test_reader_creation(self, filenames, expected_loadables):
+ """Test basic initialization."""
+ with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od:
+ od.side_effect = fake_test_content
+ r = load_reader(self.reader_configs)
+ loadables = r.select_files_from_pathnames(filenames)
+ assert len(loadables) == expected_loadables
+ r.create_filehandlers(loadables)
+ # make sure we have some files
+ assert r.file_handlers
+
+ @pytest.mark.parametrize(
+ ("filenames", "expected_datasets"),
+ [([ABI_FILE], ["variable1", "refl_0_65um_nom", "C02", "var_flags",
+ "out_of_range_flags", "longitude", "latitude"]), ]
+ )
+ def test_available_datasets(self, filenames, expected_datasets):
+ """Test that variables are dynamically discovered."""
+ from satpy.readers import load_reader
+ with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od:
+ od.side_effect = fake_test_content
+ r = load_reader(self.reader_configs)
+ loadables = r.select_files_from_pathnames(filenames)
+ r.create_filehandlers(loadables)
+ avails = list(r.available_dataset_names)
+ for var_name in expected_datasets:
+ assert var_name in avails
+
+ @pytest.mark.parametrize(
+ ("filenames", "loadable_ids"),
+ [([ABI_FILE], ["variable1", "refl_0_65um_nom", "var_flags", "out_of_range_flags"]), ]
+ )
+ def test_load_all_new_donor(self, filenames, loadable_ids):
+ """Test loading all test datasets with new donor."""
+ with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od:
+ od.side_effect = fake_test_content
+ r = load_reader(self.reader_configs)
+ loadables = r.select_files_from_pathnames(filenames)
+ r.create_filehandlers(loadables)
+
+ with mock.patch("satpy.readers.clavrx.glob") as g, \
+ mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d:
+ g.return_value = ["fake_donor.nc"]
+ x = np.linspace(-0.1518, 0.1518, DEFAULT_FILE_SHAPE[1])
+ y = np.linspace(0.1518, -0.1518, DEFAULT_FILE_SHAPE[0])
+ proj = mock.Mock(
+ semi_major_axis=6378137,
+ semi_minor_axis=6356752.3142,
+ perspective_point_height=35791000,
+ longitude_of_projection_origin=140.7,
+ sweep_angle_axis="y",
+ )
+ d.return_value = fake_donor = mock.MagicMock(
+ variables={"goes_imager_projection": proj, "x": x, "y": y},
+ )
+ fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key]
+
+ datasets = r.load(loadable_ids + ["C02"])
+ assert len(datasets) == len(loadable_ids)+1
+
+ # should have file variable and one alias for reflectance
+ assert "valid_range" not in datasets["variable1"].attrs
+ assert "_FillValue" not in datasets["variable1"].attrs
+ assert np.float32 == datasets["variable1"].dtype
+ assert "valid_range" not in datasets["variable1"].attrs
+
+ assert np.issubdtype(datasets["var_flags"].dtype, np.integer)
+ assert datasets["var_flags"].attrs.get("flag_meanings") is not None
+ assert "" == datasets["var_flags"].attrs.get("flag_meanings")
+ assert np.issubdtype(datasets["out_of_range_flags"].dtype, np.integer)
+ assert "valid_range" not in datasets["out_of_range_flags"].attrs
+
+ assert isinstance(datasets["refl_0_65um_nom"].valid_range, list)
+ assert np.float32 == datasets["refl_0_65um_nom"].dtype
+ assert "_FillValue" not in datasets["refl_0_65um_nom"].attrs
+ assert "valid_range" in datasets["refl_0_65um_nom"].attrs
+
+ assert "refl_0_65um_nom" == datasets["C02"].file_key
+ assert "_FillValue" not in datasets["C02"].attrs
+
+ for v in datasets.values():
+ assert isinstance(v.area, AreaDefinition)
+ assert v.platform_name == "GOES-16"
+ assert v.sensor == "abi"
+
+ assert "calibration" not in v.attrs
+ assert "rows_per_scan" not in v.coords.get("longitude").attrs
+ assert "units" in v.attrs
+
+ @pytest.mark.parametrize(
+ ("filenames", "expected_loadables"),
+ [([ABI_FILE], 1)]
+ )
+ def test_yaml_datasets(self, filenames, expected_loadables):
+ """Test available_datasets with fake variables from YAML."""
+ with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od:
+ od.side_effect = fake_test_content
+ r = load_reader(self.reader_configs)
+ loadables = r.select_files_from_pathnames(filenames)
+ r.create_filehandlers(loadables)
+
+ with mock.patch("satpy.readers.clavrx.glob") as g, \
+ mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d:
+ g.return_value = ["fake_donor.nc"]
+ x = np.linspace(-0.1518, 0.1518, 5)
+ y = np.linspace(0.1518, -0.1518, 5)
+ proj = mock.Mock(
+ semi_major_axis=6378137,
+ semi_minor_axis=6356752.3142,
+ perspective_point_height=35791000,
+ longitude_of_projection_origin=-137.2,
+ sweep_angle_axis="x",
+ )
+ d.return_value = fake_donor = mock.MagicMock(
+ variables={"goes_imager_projection": proj, "x": x, "y": y},
+ )
+ fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key]
+ # mimic the YAML file being configured for more datasets
+ fake_dataset_info = [
+ (None, {"name": "yaml1", "resolution": None, "file_type": ["clavrx_nc"]}),
+ (True, {"name": "yaml2", "resolution": 0.5, "file_type": ["clavrx_nc"]}),
+ ]
+ new_ds_infos = list(r.file_handlers["clavrx_nc"][0].available_datasets(
+ fake_dataset_info))
+ assert len(new_ds_infos) == 10
+
+ # we have this and can provide the resolution
+ assert (new_ds_infos[0][0])
+ assert new_ds_infos[0][1]["resolution"] == 2004 # hardcoded
+
+ # we have this, but previous file handler said it knew about it
+ # and it is producing the same resolution as what we have
+ assert (new_ds_infos[1][0])
+ assert new_ds_infos[1][1]["resolution"] == 0.5
+
+            # we have this, but don't want to change the resolution
+ # because a previous handler said it has it
+ assert (new_ds_infos[2][0])
+ assert new_ds_infos[2][1]["resolution"] == 2004
+
+ @pytest.mark.parametrize(
+ ("filenames", "loadable_ids"),
+ [([ABI_FILE], ["variable1", "refl_0_65um_nom", "var_flags", "out_of_range_flags"]), ]
+ )
+ def test_scale_data(self, filenames, loadable_ids):
+ """Test that data is scaled when necessary and not scaled data are flags."""
+ from satpy.readers.clavrx import _scale_data
+ with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od:
+ od.side_effect = fake_test_content
+ r = load_reader(self.reader_configs)
+ loadables = r.select_files_from_pathnames(filenames)
+ r.create_filehandlers(loadables)
+ with mock.patch("satpy.readers.clavrx.glob") as g, \
+ mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d:
+ g.return_value = ["fake_donor.nc"]
+ x = np.linspace(-0.1518, 0.1518, 5)
+ y = np.linspace(0.1518, -0.1518, 5)
+ proj = mock.Mock(
+ semi_major_axis=6378137,
+ semi_minor_axis=6356752.3142,
+ perspective_point_height=35791000,
+ longitude_of_projection_origin=-137.2,
+ sweep_angle_axis="x",
+ )
+ d.return_value = fake_donor = mock.MagicMock(
+ variables={"goes_imager_projection": proj, "x": x, "y": y},
+ )
+ fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key]
+
+ ds_scale = ["variable1", "refl_0_65um_nom"]
+ ds_no_scale = ["var_flags", "out_of_range_flags"]
+
+ with mock.patch("satpy.readers.clavrx._scale_data", wraps=_scale_data) as scale_data:
+ r.load(ds_scale)
+ scale_data.assert_called()
+
+ with mock.patch("satpy.readers.clavrx._scale_data", wraps=_scale_data) as scale_data2:
+ r.load(ds_no_scale)
+ scale_data2.assert_not_called()
diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx/test_clavrx_polarhdf.py
similarity index 53%
rename from satpy/tests/reader_tests/test_clavrx.py
rename to satpy/tests/reader_tests/test_clavrx/test_clavrx_polarhdf.py
index b4b1aef1a5..f8ae93c38b 100644
--- a/satpy/tests/reader_tests/test_clavrx.py
+++ b/satpy/tests/reader_tests/test_clavrx/test_clavrx_polarhdf.py
@@ -23,9 +23,8 @@
import dask.array as da
import numpy as np
-import pytest
import xarray as xr
-from pyresample.geometry import AreaDefinition, SwathDefinition
+from pyresample.geometry import SwathDefinition
from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler
@@ -49,7 +48,6 @@ def get_test_content(self, filename, filename_info, filetype_info):
"/attr/platform": "SNPP",
"/attr/sensor": "VIIRS",
}
-
file_content["longitude"] = xr.DataArray(
da.from_array(DEFAULT_LON_DATA, chunks=4096),
attrs={
@@ -105,6 +103,20 @@ def get_test_content(self, filename, filename_info, filetype_info):
})
file_content["variable3/shape"] = DEFAULT_FILE_SHAPE
+ file_content["refl_1_38um_nom"] = xr.DataArray(
+ da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.float32),
+ attrs={
+ "SCALED": 1,
+ "add_offset": 59.0,
+ "scale_factor": 0.0018616290763020515,
+ "units": "%",
+ "_FillValue": -32768,
+ "valid_range": [-32767, 32767],
+ "actual_range": [-2., 120.],
+ "actual_missing": -999.0
+ })
+ file_content["refl_1_38um_nom/shape"] = DEFAULT_FILE_SHAPE
+
return file_content
@@ -189,7 +201,7 @@ def test_available_datasets(self):
assert new_ds_infos[4][0]
assert new_ds_infos[4][1]["resolution"] == 742
- # we don't have this variable, don't change it
+        # we don't have this variable, don't change it
assert not new_ds_infos[5][0]
assert new_ds_infos[5][1].get("resolution") is None
@@ -205,8 +217,8 @@ def test_available_datasets(self):
assert new_ds_infos[8][0]
assert new_ds_infos[8][1]["resolution"] == 742
- def test_load_all(self):
- """Test loading all test datasets."""
+ def test_available_datasets_with_alias(self):
+ """Test availability of aliased dataset."""
import xarray as xr
from satpy.readers import load_reader
@@ -216,218 +228,32 @@ def test_load_all(self):
"clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf",
])
r.create_filehandlers(loadables)
+ available_ds = list(r.file_handlers["clavrx_hdf4"][0].available_datasets())
- var_list = ["variable1", "variable2", "variable3"]
- datasets = r.load(var_list)
- assert len(datasets) == len(var_list)
- for v in datasets.values():
- assert v.attrs["units"] == "1"
- assert v.attrs["platform_name"] == "npp"
- assert v.attrs["sensor"] == "viirs"
- assert isinstance(v.attrs["area"], SwathDefinition)
- assert v.attrs["area"].lons.attrs["rows_per_scan"] == 16
- assert v.attrs["area"].lats.attrs["rows_per_scan"] == 16
- assert isinstance(datasets["variable3"].attrs.get("flag_meanings"), list)
+ assert available_ds[5][1]["name"] == "refl_1_38um_nom"
+ assert available_ds[6][1]["name"] == "M09"
+ assert available_ds[6][1]["file_key"] == "refl_1_38um_nom"
-
-class FakeHDF4FileHandlerGeo(FakeHDF4FileHandler):
- """Swap-in HDF4 File Handler."""
-
- def get_test_content(self, filename, filename_info, filetype_info):
- """Mimic reader input file content."""
- file_content = {
- "/attr/platform": "HIM8",
- "/attr/sensor": "AHI",
- # this is a Level 2 file that came from a L1B file
- "/attr/L1B": "clavrx_H08_20180806_1800",
- }
-
- file_content["longitude"] = xr.DataArray(
- DEFAULT_LON_DATA,
- dims=("y", "x"),
- attrs={
- "_FillValue": np.nan,
- "scale_factor": 1.,
- "add_offset": 0.,
- "standard_name": "longitude",
- })
- file_content["longitude/shape"] = DEFAULT_FILE_SHAPE
-
- file_content["latitude"] = xr.DataArray(
- DEFAULT_LAT_DATA,
- dims=("y", "x"),
- attrs={
- "_FillValue": np.nan,
- "scale_factor": 1.,
- "add_offset": 0.,
- "standard_name": "latitude",
- })
- file_content["latitude/shape"] = DEFAULT_FILE_SHAPE
-
- file_content["variable1"] = xr.DataArray(
- DEFAULT_FILE_DATA.astype(np.float32),
- dims=("y", "x"),
- attrs={
- "_FillValue": -1,
- "scale_factor": 1.,
- "add_offset": 0.,
- "units": "1",
- "valid_range": (-32767, 32767),
- })
- file_content["variable1/shape"] = DEFAULT_FILE_SHAPE
-
- # data with fill values
- file_content["variable2"] = xr.DataArray(
- DEFAULT_FILE_DATA.astype(np.float32),
- dims=("y", "x"),
- attrs={
- "_FillValue": -1,
- "scale_factor": 1.,
- "add_offset": 0.,
- "units": "1",
- })
- file_content["variable2/shape"] = DEFAULT_FILE_SHAPE
- file_content["variable2"] = file_content["variable2"].where(
- file_content["variable2"] % 2 != 0)
-
- # category
- file_content["variable3"] = xr.DataArray(
- DEFAULT_FILE_DATA.astype(np.byte),
- dims=("y", "x"),
- attrs={
- "SCALED": 0,
- "_FillValue": -128,
- "flag_meanings": "clear water supercooled mixed ice unknown",
- "flag_values": [0, 1, 2, 3, 4, 5],
- "units": "1",
- })
- file_content["variable3/shape"] = DEFAULT_FILE_SHAPE
-
- return file_content
-
-
-class TestCLAVRXReaderGeo(unittest.TestCase):
- """Test CLAVR-X Reader with Geo files."""
-
- yaml_file = "clavrx.yaml"
-
- def setUp(self):
- """Wrap HDF4 file handler with our own fake handler."""
- from satpy._config import config_search_paths
- from satpy.readers.clavrx import CLAVRXHDF4FileHandler
- self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file))
- # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
- self.p = mock.patch.object(CLAVRXHDF4FileHandler, "__bases__", (FakeHDF4FileHandlerGeo,))
- self.fake_handler = self.p.start()
- self.p.is_local = True
-
- def tearDown(self):
- """Stop wrapping the NetCDF4 file handler."""
- self.p.stop()
-
- def test_init(self):
- """Test basic init with no extra parameters."""
- from satpy.readers import load_reader
- r = load_reader(self.reader_configs)
- loadables = r.select_files_from_pathnames([
- "clavrx_H08_20180806_1800.level2.hdf",
- ])
- assert len(loadables) == 1
- r.create_filehandlers(loadables)
- # make sure we have some files
- assert r.file_handlers
-
- def test_no_nav_donor(self):
- """Test exception raised when no donor file is available."""
- import xarray as xr
-
- from satpy.readers import load_reader
- r = load_reader(self.reader_configs)
- with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray):
- loadables = r.select_files_from_pathnames([
- "clavrx_H08_20180806_1800.level2.hdf",
- ])
- r.create_filehandlers(loadables)
- with pytest.raises(IOError, match="Could not find navigation donor for"):
- r.load(["variable1", "variable2", "variable3"])
-
- def test_load_all_old_donor(self):
- """Test loading all test datasets with old donor."""
+ def test_load_all(self):
+ """Test loading all test datasets."""
import xarray as xr
from satpy.readers import load_reader
r = load_reader(self.reader_configs)
with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray):
loadables = r.select_files_from_pathnames([
- "clavrx_H08_20180806_1800.level2.hdf",
+ "clavrx_npp_d20170520_t2053581_e2055223_b28822.level2.hdf",
])
r.create_filehandlers(loadables)
- with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d:
- g.return_value = ["fake_donor.nc"]
- x = np.linspace(-0.1518, 0.1518, 300)
- y = np.linspace(0.1518, -0.1518, 10)
- proj = mock.Mock(
- semi_major_axis=6378.137,
- semi_minor_axis=6356.7523142,
- perspective_point_height=35791,
- longitude_of_projection_origin=140.7,
- sweep_angle_axis="y",
- )
- d.return_value = fake_donor = mock.MagicMock(
- variables={"Projection": proj, "x": x, "y": y},
- )
- fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key]
- datasets = r.load(["variable1", "variable2", "variable3"])
- assert len(datasets) == 3
- for v in datasets.values():
- assert "calibration" not in v.attrs
- assert v.attrs["units"] == "1"
- assert isinstance(v.attrs["area"], AreaDefinition)
- if v.attrs.get("flag_values"):
- assert "_FillValue" in v.attrs
- else:
- assert "_FillValue" not in v.attrs
- if v.attrs["name"] == "variable1":
- assert isinstance(v.attrs["valid_range"], list)
- else:
- assert "valid_range" not in v.attrs
- if "flag_values" in v.attrs:
- assert np.issubdtype(v.dtype, np.integer)
- assert v.attrs.get("flag_meanings") is not None
-
- def test_load_all_new_donor(self):
- """Test loading all test datasets with new donor."""
- import xarray as xr
- from satpy.readers import load_reader
- r = load_reader(self.reader_configs)
- with mock.patch("satpy.readers.clavrx.SDS", xr.DataArray):
- loadables = r.select_files_from_pathnames([
- "clavrx_H08_20180806_1800.level2.hdf",
- ])
- r.create_filehandlers(loadables)
- with mock.patch("satpy.readers.clavrx.glob") as g, mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d:
- g.return_value = ["fake_donor.nc"]
- x = np.linspace(-0.1518, 0.1518, 300)
- y = np.linspace(0.1518, -0.1518, 10)
- proj = mock.Mock(
- semi_major_axis=6378137,
- semi_minor_axis=6356752.3142,
- perspective_point_height=35791000,
- longitude_of_projection_origin=140.7,
- sweep_angle_axis="y",
- )
- d.return_value = fake_donor = mock.MagicMock(
- variables={"goes_imager_projection": proj, "x": x, "y": y},
- )
- fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key]
- datasets = r.load(["variable1", "variable2", "variable3"])
- assert len(datasets) == 3
+ var_list = ["M09", "variable2", "variable3"]
+ datasets = r.load(var_list)
+ assert len(datasets) == len(var_list)
for v in datasets.values():
- assert "calibration" not in v.attrs
- assert v.attrs["units"] == "1"
- assert isinstance(v.attrs["area"], AreaDefinition)
- assert v.attrs["area"].is_geostationary
- assert v.attrs["platform_name"] == "himawari8"
- assert v.attrs["sensor"] == "ahi"
- assert datasets["variable3"].attrs.get("flag_meanings") is not None
+ assert v.attrs["units"] in ["1", "%"]
+ assert v.attrs["platform_name"] == "npp"
+ assert v.attrs["sensor"] == "viirs"
+ assert isinstance(v.attrs["area"], SwathDefinition)
+ assert v.attrs["area"].lons.attrs["rows_per_scan"] == 16
+ assert v.attrs["area"].lats.attrs["rows_per_scan"] == 16
+ assert isinstance(datasets["variable3"].attrs.get("flag_meanings"), list)
diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py
deleted file mode 100644
index 33be29078a..0000000000
--- a/satpy/tests/reader_tests/test_clavrx_nc.py
+++ /dev/null
@@ -1,202 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (c) 2021 Satpy developers
-#
-# This file is part of satpy.
-#
-# satpy is free software: you can redistribute it and/or modify it under the
-# terms of the GNU General Public License as published by the Free Software
-# Foundation, either version 3 of the License, or (at your option) any later
-# version.
-#
-# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
-# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
-# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# satpy. If not, see .
-"""Module for testing the satpy.readers.clavrx module."""
-
-import os
-from unittest import mock
-
-import numpy as np
-import pytest
-import xarray as xr
-from pyresample.geometry import AreaDefinition
-
-DEFAULT_FILE_DTYPE = np.uint16
-DEFAULT_FILE_SHAPE = (10, 300)
-DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1],
- dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE)
-DEFAULT_FILE_FLAGS = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1],
- dtype=np.byte).reshape(DEFAULT_FILE_SHAPE)
-DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32)
-DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
-DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
-DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
-DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
-AHI_FILE = "clavrx_H08_20210603_1500_B01_FLDK_R.level2.nc"
-
-
-def fake_test_content(filename, **kwargs):
- """Mimic reader input file content."""
- attrs = {
- "platform": "HIM8",
- "sensor": "AHI",
- # this is a Level 2 file that came from a L1B file
- "L1B": "clavrx_H08_20210603_1500_B01_FLDK_R",
- }
-
- longitude = xr.DataArray(DEFAULT_LON_DATA,
- dims=("scan_lines_along_track_direction",
- "pixel_elements_along_scan_direction"),
- attrs={"_FillValue": np.nan,
- "scale_factor": 1.,
- "add_offset": 0.,
- "standard_name": "longitude",
- "units": "degrees_east"
- })
-
- latitude = xr.DataArray(DEFAULT_LAT_DATA,
- dims=("scan_lines_along_track_direction",
- "pixel_elements_along_scan_direction"),
- attrs={"_FillValue": np.nan,
- "scale_factor": 1.,
- "add_offset": 0.,
- "standard_name": "latitude",
- "units": "degrees_south"
- })
-
- variable1 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32),
- dims=("scan_lines_along_track_direction",
- "pixel_elements_along_scan_direction"),
- attrs={"_FillValue": np.nan,
- "scale_factor": 1.,
- "add_offset": 0.,
- "units": "1",
- "valid_range": [-32767, 32767],
- })
-
- # data with fill values
- variable2 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32),
- dims=("scan_lines_along_track_direction",
- "pixel_elements_along_scan_direction"),
- attrs={"_FillValue": np.nan,
- "scale_factor": 1.,
- "add_offset": 0.,
- "units": "1",
- "valid_range": [-32767, 32767],
- })
- variable2 = variable2.where(variable2 % 2 != 0)
-
- # category
- variable3 = xr.DataArray(DEFAULT_FILE_FLAGS,
- dims=("scan_lines_along_track_direction",
- "pixel_elements_along_scan_direction"),
- attrs={"SCALED": 0,
- "_FillValue": -127,
- "units": "1",
- "flag_values": [0, 1, 2, 3]})
-
- ds_vars = {
- "longitude": longitude,
- "latitude": latitude,
- "variable1": variable1,
- "variable2": variable2,
- "variable3": variable3
- }
-
- ds = xr.Dataset(ds_vars, attrs=attrs)
- ds = ds.assign_coords({"latitude": latitude, "longitude": longitude})
-
- return ds
-
-
-class TestCLAVRXReaderGeo:
- """Test CLAVR-X Reader with Geo files."""
-
- yaml_file = "clavrx.yaml"
-
- def setup_method(self):
- """Read fake data."""
- from satpy._config import config_search_paths
- self.reader_configs = config_search_paths(os.path.join("readers", self.yaml_file))
-
- @pytest.mark.parametrize(
- ("filenames", "expected_loadables"),
- [([AHI_FILE], 1)]
- )
- def test_reader_creation(self, filenames, expected_loadables):
- """Test basic initialization."""
- from satpy.readers import load_reader
- with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od:
- od.side_effect = fake_test_content
- r = load_reader(self.reader_configs)
- loadables = r.select_files_from_pathnames(filenames)
- assert len(loadables) == expected_loadables
- r.create_filehandlers(loadables)
- # make sure we have some files
- assert r.file_handlers
-
- @pytest.mark.parametrize(
- ("filenames", "expected_datasets"),
- [([AHI_FILE], ["variable1", "variable2", "variable3"]), ]
- )
- def test_available_datasets(self, filenames, expected_datasets):
- """Test that variables are dynamically discovered."""
- from satpy.readers import load_reader
- with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od:
- od.side_effect = fake_test_content
- r = load_reader(self.reader_configs)
- loadables = r.select_files_from_pathnames(filenames)
- r.create_filehandlers(loadables)
- avails = list(r.available_dataset_names)
- for var_name in expected_datasets:
- assert var_name in avails
-
- @pytest.mark.parametrize(
- ("filenames", "loadable_ids"),
- [([AHI_FILE], ["variable1", "variable2", "variable3"]), ]
- )
- def test_load_all_new_donor(self, filenames, loadable_ids):
- """Test loading all test datasets with new donor."""
- from satpy.readers import load_reader
- with mock.patch("satpy.readers.clavrx.xr.open_dataset") as od:
- od.side_effect = fake_test_content
- r = load_reader(self.reader_configs)
- loadables = r.select_files_from_pathnames(filenames)
- r.create_filehandlers(loadables)
- with mock.patch("satpy.readers.clavrx.glob") as g, \
- mock.patch("satpy.readers.clavrx.netCDF4.Dataset") as d:
- g.return_value = ["fake_donor.nc"]
- x = np.linspace(-0.1518, 0.1518, 300)
- y = np.linspace(0.1518, -0.1518, 10)
- proj = mock.Mock(
- semi_major_axis=6378137,
- semi_minor_axis=6356752.3142,
- perspective_point_height=35791000,
- longitude_of_projection_origin=140.7,
- sweep_angle_axis="y",
- )
- d.return_value = fake_donor = mock.MagicMock(
- variables={"goes_imager_projection": proj, "x": x, "y": y},
- )
- fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key]
- datasets = r.load(loadable_ids)
- assert len(datasets) == 3
- for v in datasets.values():
- assert "calibration" not in v.attrs
- assert v.attrs["units"] == "1"
- assert isinstance(v.attrs["area"], AreaDefinition)
- assert v.attrs["platform_name"] == "himawari8"
- assert v.attrs["sensor"] == "ahi"
- assert "rows_per_scan" not in v.coords.get("longitude").attrs
- if v.attrs["name"] in ["variable1", "variable2"]:
- assert isinstance(v.attrs["valid_range"], list)
- assert v.dtype == np.float32
- assert "_FillValue" not in v.attrs.keys()
- else:
- assert (datasets["variable3"].attrs.get("flag_meanings")) is not None
- assert (datasets["variable3"].attrs.get("flag_meanings") == "")
- assert np.issubdtype(v.dtype, np.integer)