diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 66a4db273d..995f3035c4 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -20,7 +20,7 @@ repos:
- id: bandit
args: [--ini, .bandit]
- repo: https://github.com/pre-commit/mirrors-mypy
- rev: 'v1.3.0' # Use the sha / tag you want to point at
+ rev: 'v1.4.1' # Use the sha / tag you want to point at
hooks:
- id: mypy
additional_dependencies:
diff --git a/AUTHORS.md b/AUTHORS.md
index fa5e0272d0..9078e441b4 100644
--- a/AUTHORS.md
+++ b/AUTHORS.md
@@ -27,6 +27,7 @@ The following people have made contributions to this project:
- [Ulrik Egede (egede)](https://github.com/egede)
- [Joleen Feltz (joleenf)](https://github.com/joleenf)
- [Stephan Finkensieper (sfinkens)](https://github.com/sfinkens) - Deutscher Wetterdienst
+- [Gionata Ghiggi (ghiggi)](https://github.com/ghiggi)
- [Andrea Grillini (AppLEaDaY)](https://github.com/AppLEaDaY)
- [Blanka Gvozdikova (gvozdikb)](https://github.com/gvozdikb)
- [Nina Håkansson (ninahakansson)](https://github.com/ninahakansson)
diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml
index 41fc50627a..48976401a2 100644
--- a/continuous_integration/environment.yaml
+++ b/continuous_integration/environment.yaml
@@ -5,10 +5,12 @@ dependencies:
- xarray!=2022.9.0
- dask
- distributed
+ - dask-image
- donfig
- appdirs
- toolz
- Cython
+ - numba
- sphinx
- cartopy
- panel>=0.12.7
diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml
index 82168df77d..ce147a1644 100644
--- a/doc/rtd_environment.yml
+++ b/doc/rtd_environment.yml
@@ -6,11 +6,13 @@ dependencies:
- pip
- appdirs
- dask
+ - dask-image
- defusedxml
- donfig
# 2.19.1 seems to cause library linking issues
- eccodes>=2.20
- graphviz
+ - numba
- numpy
- pillow
- pooch
diff --git a/doc/source/composites.rst b/doc/source/composites.rst
index 4804aba0df..d0c494e414 100644
--- a/doc/source/composites.rst
+++ b/doc/source/composites.rst
@@ -173,7 +173,7 @@ In the case below, the image shows its day portion and day/night
transition with night portion blacked-out instead of transparent::
>>> from satpy.composites import DayNightCompositor
- >>> compositor = DayNightCompositor("dnc", lim_low=85., lim_high=88., day_night="day_only", need_alpha=False)
+ >>> compositor = DayNightCompositor("dnc", lim_low=85., lim_high=88., day_night="day_only", include_alpha=False)
>>> composite = compositor([local_scene['true_color']])
RealisticColors
diff --git a/doc/source/config.rst b/doc/source/config.rst
index 63da03dac7..b1777c9751 100644
--- a/doc/source/config.rst
+++ b/doc/source/config.rst
@@ -258,6 +258,23 @@ as part of the :func:`~satpy.modifiers.angles.get_angles` and
used by multiple modifiers and composites including the default rayleigh
correction.
+Clipping Negative Infrared Radiances
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+* **Environment variable**: ``SATPY_READERS__CLIP_NEGATIVE_RADIANCES``
+* **YAML/Config Key**: ``readers.clip_negative_radiances``
+* **Default**: False
+
+Whether to clip negative infrared radiances to the minimum allowable value before
+computing the brightness temperature.
+If ``clip_negative_radiances=False``, pixels with negative radiances will have
+``np.nan`` brightness temperatures.
+
+Clipping of negative radiances is currently implemented for the following readers:
+
+* ``abi_l1b``
+
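+For example, to enable clipping at runtime via satpy's donfig-based
+configuration (a minimal sketch)::
+
+ import satpy
+ satpy.config.set({"readers": {"clip_negative_radiances": True}})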
+
Temporary Directory
^^^^^^^^^^^^^^^^^^^
diff --git a/satpy/_config.py b/satpy/_config.py
index 2b583c435c..4abc00aba2 100644
--- a/satpy/_config.py
+++ b/satpy/_config.py
@@ -62,6 +62,9 @@ def impr_files(module_name: str) -> Path:
'demo_data_dir': '.',
'download_aux': True,
'sensor_angles_position_preference': 'actual',
+ 'readers': {
+ 'clip_negative_radiances': False,
+ },
}
# Satpy main configuration object
diff --git a/satpy/_scene_converters.py b/satpy/_scene_converters.py
new file mode 100644
index 0000000000..25fe728b9f
--- /dev/null
+++ b/satpy/_scene_converters.py
@@ -0,0 +1,124 @@
+# Copyright (c) 2023 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Helper functions for converting the Scene object to some other object."""
+
+import xarray as xr
+
+from satpy.dataset import DataID
+
+
+def _get_dataarrays_from_identifiers(scn, identifiers):
+ """Return a list of DataArray based on a single or list of identifiers.
+
+ An identifier can be a DataID or a string with name of a valid DataID.
+ """
+ if isinstance(identifiers, (str, DataID)):
+ identifiers = [identifiers]
+
+ if identifiers is not None:
+ dataarrays = [scn[ds] for ds in identifiers]
+ else:
+ dataarrays = [scn._datasets.get(ds) for ds in scn._wishlist]
+ dataarrays = [dataarray for dataarray in dataarrays if dataarray is not None]
+ return dataarrays
+
+
+def to_xarray(scn,
+ datasets=None, # DataID
+ header_attrs=None,
+ exclude_attrs=None,
+ flatten_attrs=False,
+ pretty=True,
+ include_lonlats=True,
+ epoch=None,
+ include_orig_name=True,
+ numeric_name_prefix='CHANNEL_'):
+ """Merge all xr.DataArray(s) of a satpy.Scene to a CF-compliant xarray object.
+
+ If all Scene DataArrays are on the same area, it returns an xr.Dataset.
+ If Scene DataArrays are on different areas, it currently fails; in the
+ future a DataTree object grouped by area may be returned.
+
+ Parameters
+ ----------
+ scn: satpy.Scene
+ Satpy Scene.
+ datasets (iterable):
+ List of Satpy Scene datasets to include in the output xr.Dataset.
+ Elements can be string name, a wavelength as a number, a DataID,
+ or DataQuery object.
+ If None (the default), it includes all loaded Scene datasets.
+ header_attrs:
+ Global attributes of the output xr.Dataset.
+ epoch (str):
+ Reference time for encoding the time coordinates (if available).
+ Example format: "seconds since 1970-01-01 00:00:00".
+ If None, the default reference time is retrieved via "from satpy.writers.cf_writer import EPOCH".
+ flatten_attrs (bool):
+ If True, flatten dict-type attributes.
+ exclude_attrs (list):
+ List of xr.DataArray attribute names to be excluded.
+ include_lonlats (bool):
+ If True, it includes 'latitude' and 'longitude' coordinates.
+ If the 'area' attribute is a SwathDefinition, it always includes
+ latitude and longitude coordinates.
+ pretty (bool):
+ Don't modify coordinate names, if possible. Makes the file prettier,
+ but possibly less consistent.
+ include_orig_name (bool):
+ Include the original dataset name as a variable attribute in the xr.Dataset.
+ numeric_name_prefix (str):
+ Prefix to add to each variable whose name starts with a digit.
+ Use '' or None to leave this out.
+
+ Returns
+ -------
+ ds : xr.Dataset
+ A CF-compliant xr.Dataset
+
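+ Examples
+ --------
+ A minimal sketch (assuming ``scn`` is a Scene with loaded datasets;
+ the dataset name is illustrative)::
+
+ >>> ds = to_xarray(scn, datasets=["VIS006"])
+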
+ """
+ from satpy.writers.cf_writer import EPOCH, collect_cf_datasets
+
+ if epoch is None:
+ epoch = EPOCH
+
+ # Get list of DataArrays
+ if datasets is None:
+ datasets = list(scn.keys()) # list all loaded DataIDs
+ list_dataarrays = _get_dataarrays_from_identifiers(scn, datasets)
+
+ # Check that some DataArray could be returned
+ if len(list_dataarrays) == 0:
+ return xr.Dataset()
+
+ # Collect xr.Dataset for each group
+ grouped_datasets, header_attrs = collect_cf_datasets(list_dataarrays=list_dataarrays,
+ header_attrs=header_attrs,
+ exclude_attrs=exclude_attrs,
+ flatten_attrs=flatten_attrs,
+ pretty=pretty,
+ include_lonlats=include_lonlats,
+ epoch=epoch,
+ include_orig_name=include_orig_name,
+ numeric_name_prefix=numeric_name_prefix,
+ groups=None)
+ if len(grouped_datasets) == 1:
+ ds = grouped_datasets[None]
+ return ds
+ else:
+ msg = """The Scene object contains datasets with different areas.
+ Resample the Scene to have matching dimensions using i.e. scn.resample(resampler="native") """
+ raise NotImplementedError(msg)
diff --git a/satpy/etc/composites/visir.yaml b/satpy/etc/composites/visir.yaml
index 8bd61ff7a8..0bb177e9ff 100644
--- a/satpy/etc/composites/visir.yaml
+++ b/satpy/etc/composites/visir.yaml
@@ -96,6 +96,11 @@ modifiers:
- solar_azimuth_angle
- solar_zenith_angle
+ median5x5:
+ modifier: !!python/name:satpy.modifiers.filters.Median
+ median_filter_params:
+ size: 5
+
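+ # A modifier defined here can be requested from a composite recipe,
+ # for example (illustrative prerequisite):
+ #
+ # some_composite:
+ # compositor: !!python/name:satpy.composites.GenericCompositor
+ # prerequisites:
+ # - wavelength: 0.6
+ # modifiers: [median5x5]
+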
composites:
airmass:
diff --git a/satpy/etc/readers/gms5-vissr_l1b.yaml b/satpy/etc/readers/gms5-vissr_l1b.yaml
new file mode 100644
index 0000000000..7bcca57399
--- /dev/null
+++ b/satpy/etc/readers/gms5-vissr_l1b.yaml
@@ -0,0 +1,99 @@
+reader:
+ name: gms5-vissr_l1b
+ short_name: GMS-5 VISSR L1b
+ long_name: GMS-5 VISSR Level 1b
+ description: >
+ Reader for GMS-5 VISSR Level 1b data. References:
+
+ - https://www.data.jma.go.jp/mscweb/en/operation/fig/VISSR_FORMAT_GMS-5.pdf
+ - https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf
+
+ status: Alpha
+ supports_fsspec: true
+ sensors: [gms5-vissr]
+ default_channels: []
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+
+file_types:
+ gms5_vissr_vis:
+ file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler
+ file_patterns:
+ - 'VISSR_{start_time:%Y%m%d_%H%M}_VIS.{mode}.IMG'
+ - 'VISSR_{start_time:%Y%m%d_%H%M}_VIS.{mode}.IMG.gz'
+
+ gms5_vissr_ir1:
+ file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler
+ file_patterns:
+ - 'VISSR_{start_time:%Y%m%d_%H%M}_IR1.{mode}.IMG'
+ - 'VISSR_{start_time:%Y%m%d_%H%M}_IR1.{mode}.IMG.gz'
+
+ gms5_vissr_ir2:
+ file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler
+ file_patterns:
+ - 'VISSR_{start_time:%Y%m%d_%H%M}_IR2.{mode}.IMG'
+ - 'VISSR_{start_time:%Y%m%d_%H%M}_IR2.{mode}.IMG.gz'
+
+
+ gms5_vissr_ir3:
+ file_reader: !!python/name:satpy.readers.gms.gms5_vissr_l1b.GMS5VISSRFileHandler
+ file_patterns:
+ - 'VISSR_{start_time:%Y%m%d_%H%M}_IR3.{mode}.IMG'
+ - 'VISSR_{start_time:%Y%m%d_%H%M}_IR3.{mode}.IMG.gz'
+
+
+datasets:
+ VIS:
+ name: VIS
+ sensor: gms5-vissr
+ wavelength: [0.55, 0.73, 0.9]
+ resolution: 1250
+ calibration:
+ counts:
+ standard_name: counts
+ units: 1
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ file_type: gms5_vissr_vis
+
+ IR1:
+ name: IR1
+ sensor: gms5-vissr
+ wavelength: [10.5, 11.0, 11.5]
+ resolution: 5000
+ calibration:
+ counts:
+ standard_name: counts
+ units: 1
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ file_type: gms5_vissr_ir1
+
+ IR2:
+ name: IR2
+ sensor: gms5-vissr
+ wavelength: [11.5, 12.0, 12.5]
+ resolution: 5000
+ calibration:
+ counts:
+ standard_name: counts
+ units: 1
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ file_type: gms5_vissr_ir2
+
+ IR3:
+ name: IR3
+ sensor: gms5-vissr
+ wavelength: [6.5, 6.75, 7.0]
+ resolution: 5000
+ calibration:
+ counts:
+ standard_name: counts
+ units: 1
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ file_type: gms5_vissr_ir3
diff --git a/satpy/etc/readers/nwcsaf-pps_nc.yaml b/satpy/etc/readers/nwcsaf-pps_nc.yaml
index 67031d99dd..29fabf304c 100644
--- a/satpy/etc/readers/nwcsaf-pps_nc.yaml
+++ b/satpy/etc/readers/nwcsaf-pps_nc.yaml
@@ -350,4 +350,4 @@ datasets:
name: cmic_dcot
file_key: dcot
file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
- coordinates: [lon, lat]
\ No newline at end of file
+ coordinates: [lon, lat]
diff --git a/satpy/modifiers/filters.py b/satpy/modifiers/filters.py
new file mode 100644
index 0000000000..151082e723
--- /dev/null
+++ b/satpy/modifiers/filters.py
@@ -0,0 +1,34 @@
+"""Tests for image filters."""
+import logging
+
+import xarray as xr
+
+from satpy.modifiers import ModifierBase
+
+logger = logging.getLogger(__name__)
+
+
+class Median(ModifierBase):
+ """Apply a median filter to the band."""
+
+ def __init__(self, median_filter_params, **kwargs):
+ """Create the instance.
+
+ Args:
+ median_filter_params: The arguments to pass to dask-image's median_filter function. For example,
+ {"size": 3} gives the median filter a kernel of size 3.
+
+ """
+ self.median_filter_params = median_filter_params
+ super().__init__(**kwargs)
+
+ def __call__(self, arrays, **info):
+ """Get the median filtered band."""
+ from dask_image.ndfilters import median_filter
+
+ data = arrays[0]
+ logger.debug(f"Apply median filtering with parameters {self.median_filter_params}.")
+ res = xr.DataArray(median_filter(data.data, **self.median_filter_params),
+ dims=data.dims, attrs=data.attrs, coords=data.coords)
+ self.apply_modifier_info(data, res)
+ return res
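+
+
+# Minimal usage sketch (illustrative; in practice the modifier is
+# instantiated from YAML configuration such as satpy/etc/composites/visir.yaml):
+#
+# import dask.array as da
+# import xarray as xr
+# arr = xr.DataArray(da.random.random((100, 100)), dims=("y", "x"))
+# med = Median({"size": 5}, name="median5x5")
+# filtered = med([arr])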
diff --git a/satpy/readers/abi_l1b.py b/satpy/readers/abi_l1b.py
index d1ed730792..dafdc8a373 100644
--- a/satpy/readers/abi_l1b.py
+++ b/satpy/readers/abi_l1b.py
@@ -22,11 +22,11 @@
https://www.goes-r.gov/users/docs/PUG-L1b-vol3.pdf
"""
-
import logging
import numpy as np
+import satpy
from satpy.readers.abi_base import NC_ABI_BASE
logger = logging.getLogger(__name__)
@@ -35,9 +35,17 @@
class NC_ABI_L1B(NC_ABI_BASE):
"""File reader for individual ABI L1B NetCDF4 files."""
+ def __init__(self, filename, filename_info, filetype_info, clip_negative_radiances=None):
+ """Open the NetCDF file with xarray and prepare the Dataset for reading."""
+ super().__init__(filename, filename_info, filetype_info)
+ if clip_negative_radiances is None:
+ clip_negative_radiances = satpy.config.get("readers.clip_negative_radiances")
+ self.clip_negative_radiances = clip_negative_radiances
+
def get_dataset(self, key, info):
"""Load a dataset."""
logger.debug('Reading in get_dataset %s.', key['name'])
+
# For raw cal, don't apply scale and offset, return raw file counts
if key['calibration'] == 'counts':
radiances = self.nc['Rad'].copy()
@@ -139,6 +147,16 @@ def _vis_calibrate(self, data):
res.attrs['standard_name'] = 'toa_bidirectional_reflectance'
return res
+ def _get_minimum_radiance(self, data):
+ """Estimate minimum radiance from Rad DataArray."""
+ attrs = data.attrs
+ scale_factor = attrs["scale_factor"]
+ add_offset = attrs["add_offset"]
+ count_zero_rad = - add_offset / scale_factor
+ count_pos = np.ceil(count_zero_rad)
+ min_rad = count_pos * scale_factor + add_offset
+ return min_rad
+
def _ir_calibrate(self, data):
"""Calibrate IR channels to BT."""
fk1 = float(self["planck_fk1"])
@@ -146,6 +164,10 @@ def _ir_calibrate(self, data):
bc1 = float(self["planck_bc1"])
bc2 = float(self["planck_bc2"])
+ if self.clip_negative_radiances:
+ min_rad = self._get_minimum_radiance(data)
+ data = data.clip(min=min_rad)
+
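+ # Inverse Planck function converts radiance to brightness temperature;
+ # bc1 and bc2 apply the band-specific correction.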
res = (fk2 / np.log(fk1 / data + 1) - bc1) / bc2
res.attrs = data.attrs
res.attrs['units'] = 'K'
diff --git a/satpy/readers/geocat.py b/satpy/readers/geocat.py
index 3343e25533..5086cd899b 100644
--- a/satpy/readers/geocat.py
+++ b/satpy/readers/geocat.py
@@ -56,7 +56,30 @@
class GEOCATFileHandler(NetCDF4FileHandler):
- """GEOCAT netCDF4 file handler."""
+ """GEOCAT netCDF4 file handler.
+
+ **Loading data with decode_times=True**
+
+ By default, this reader uses ``xarray_kwargs={"engine": "netcdf4", "decode_times": False}``
+ to match the behavior of xarray when the geocat reader was first written. To use
+ different options, pass ``reader_kwargs`` when loading the Scene::
+
+ scene = satpy.Scene(filenames,
+ reader='geocat',
+ reader_kwargs={'xarray_kwargs': {'engine': 'netcdf4', 'decode_times': True}})
+ """
+
+ def __init__(self, filename, filename_info, filetype_info,
+ **kwargs):
+ """Open and perform initial investigation of NetCDF file."""
+ kwargs.setdefault('xarray_kwargs', {}).setdefault(
+ 'engine', "netcdf4")
+ kwargs.setdefault('xarray_kwargs', {}).setdefault(
+ 'decode_times', False)
+
+ super(GEOCATFileHandler, self).__init__(
+ filename, filename_info, filetype_info,
+ xarray_kwargs=kwargs["xarray_kwargs"])
sensors = {
'goes': 'goes_imager',
diff --git a/satpy/readers/gms/__init__.py b/satpy/readers/gms/__init__.py
new file mode 100644
index 0000000000..7b1f2041c3
--- /dev/null
+++ b/satpy/readers/gms/__init__.py
@@ -0,0 +1 @@
+"""GMS reader module."""
diff --git a/satpy/readers/gms/gms5_vissr_format.py b/satpy/readers/gms/gms5_vissr_format.py
new file mode 100644
index 0000000000..a5052097eb
--- /dev/null
+++ b/satpy/readers/gms/gms5_vissr_format.py
@@ -0,0 +1,397 @@
+"""GMS-5 VISSR archive data format.
+
+Reference: `VISSR Format Description`_
+
+.. _VISSR Format Description:
+ https://www.data.jma.go.jp/mscweb/en/operation/fig/VISSR_FORMAT_GMS-5.pdf
+"""
+
+import numpy as np
+
+U1 = ">u1"
+I2 = ">i2"
+I4 = ">i4"
+R4 = ">f4"
+R8 = ">f8"
+
+VIS_CHANNEL = "VIS"
+IR_CHANNEL = "IR"
+CHANNEL_TYPES = {
+ "VIS": VIS_CHANNEL,
+ "IR1": IR_CHANNEL,
+ "IR2": IR_CHANNEL,
+ "IR3": IR_CHANNEL,
+ "WV": IR_CHANNEL,
+}
+ALT_CHANNEL_NAMES = {"VIS": "VIS", "IR1": "IR1", "IR2": "IR2", "IR3": "WV"}
+BLOCK_SIZE_VIS = 13504
+BLOCK_SIZE_IR = 3664
+
+IMAGE_PARAM_ITEM_SIZE = 2688
+TIME = [("date", I4), ("time", I4)]
+CHANNELS = [("VIS", R4), ("IR1", R4), ("IR2", R4), ("WV", R4)]
+VISIR_SOLAR = [("VIS", R4), ("IR", R4)]
+
+CONTROL_BLOCK = np.dtype([('control_block_size', I2),
+ ('head_block_number_of_parameter_block', I2),
+ ('parameter_block_size', I2),
+ ('head_block_number_of_image_data', I2),
+ ('total_block_size_of_image_data', I2),
+ ('available_block_size_of_image_data', I2),
+ ('head_valid_line_number', I2),
+ ('final_valid_line_number', I2),
+ ('final_data_block_number', I2)])
+
+MODE_BLOCK_FRAME_PARAMETERS = [('bit_length', I4),
+ ('number_of_lines', I4),
+ ('number_of_pixels', I4),
+ ('stepping_angle', R4),
+ ('sampling_angle', R4),
+ ('lcw_pixel_size', I4),
+ ('doc_pixel_size', I4),
+ ('reserved', I4)]
+
+MODE_BLOCK = np.dtype([('satellite_number', I4),
+ ('satellite_name', '|S12'),
+ ('observation_time_ad', '|S16'),
+ ('observation_time_mjd', R8),
+ ('gms_operation_mode', I4),
+ ('dpc_operation_mode', I4),
+ ('vissr_observation_mode', I4),
+ ('scanner_selection', I4),
+ ('sensor_selection', I4),
+ ('sensor_mode', I4),
+ ('scan_frame_mode', I4),
+ ('scan_mode', I4),
+ ('upper_limit_of_scan_number', I4),
+ ('lower_limit_of_scan_number', I4),
+ ('equatorial_scan_line_number', I4),
+ ('spin_rate', R4),
+ ('vis_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS),
+ ('ir_frame_parameters', MODE_BLOCK_FRAME_PARAMETERS),
+ ('satellite_height', R4),
+ ('earth_radius', R4),
+ ('ssp_longitude', R4),
+ ('reserved_1', I4, 9),
+ ('table_of_sensor_trouble', I4, 14),
+ ('reserved_2', I4, 36),
+ ('status_tables_of_data_relative_address_segment', I4, 60)])
+
+COORDINATE_CONVERSION_PARAMETERS = np.dtype([
+ ('data_segment', I4),
+ ('data_validity', I4),
+ ('data_generation_time', TIME),
+ ('scheduled_observation_time', R8),
+ ('stepping_angle_along_line', CHANNELS),
+ ('sampling_angle_along_pixel', CHANNELS),
+ ('central_line_number_of_vissr_frame', CHANNELS),
+ ('central_pixel_number_of_vissr_frame', CHANNELS),
+ ('pixel_difference_of_vissr_center_from_normal_position', CHANNELS),
+ ('number_of_sensor_elements', CHANNELS),
+ ('total_number_of_vissr_frame_lines', CHANNELS),
+ ('total_number_of_vissr_frame_pixels', CHANNELS),
+ ('vissr_misalignment', R4, (3,)),
+ ('matrix_of_misalignment', R4, (3, 3)),
+ ('parameters', [('judgement_of_observation_convergence_time', R4),
+ ('judgement_of_line_convergence', R4),
+ ('east_west_angle_of_sun_light_condense_prism', R4),
+ ('north_south_angle_of_sun_light_condense_prism', R4),
+ ('pi', R4),
+ ('pi_divided_by_180', R4),
+ ('180_divided_by_pi', R4),
+ ('equatorial_radius', R4),
+ ('oblateness_of_earth', R4),
+ ('eccentricity_of_earth_orbit', R4),
+ ('first_angle_of_vissr_observation_in_sdb', R4),
+ ('upper_limited_line_of_2nd_prism_for_vis_solar_observation', R4),
+ ('lower_limited_line_of_1st_prism_for_vis_solar_observation', R4),
+ ('upper_limited_line_of_3rd_prism_for_vis_solar_observation', R4),
+ ('lower_limited_line_of_2nd_prism_for_vis_solar_observation', R4)]),
+ ('solar_stepping_angle_along_line', VISIR_SOLAR),
+ ('solar_sampling_angle_along_pixel', VISIR_SOLAR),
+ ('solar_center_line_of_vissr_frame', VISIR_SOLAR),
+ ('solar_center_pixel_of_vissr_frame', VISIR_SOLAR),
+ ('solar_pixel_difference_of_vissr_center_from_normal_position', VISIR_SOLAR),
+ ('solar_number_of_sensor_elements', VISIR_SOLAR),
+ ('solar_total_number_of_vissr_frame_lines', VISIR_SOLAR),
+ ('solar_total_number_of_vissr_frame_pixels', VISIR_SOLAR),
+ ('reserved_1', I4, 19),
+ ('orbital_parameters', [('epoch_time', R8),
+ ('semi_major_axis', R8),
+ ('eccentricity', R8),
+ ('orbital_inclination', R8),
+ ('longitude_of_ascending_node', R8),
+ ('argument_of_perigee', R8),
+ ('mean_anomaly', R8),
+ ('longitude_of_ssp', R8),
+ ('latitude_of_ssp', R8)]),
+ ('reserved_2', I4, 2),
+ ('attitude_parameters', [('epoch_time', R8),
+ ('angle_between_z_axis_and_satellite_spin_axis_at_epoch_time', R8),
+ ('angle_change_rate_between_spin_axis_and_z_axis', R8),
+ ('angle_between_spin_axis_and_zy_axis', R8),
+ ('angle_change_rate_between_spin_axis_and_zt_axis', R8),
+ ('daily_mean_of_spin_rate', R8)]),
+ ('reserved_3', I4, 529),
+ ('correction_of_image_distortion', [('stepping_angle_along_line_of_ir1', R4),
+ ('stepping_angle_along_line_of_ir2', R4),
+ ('stepping_angle_along_line_of_wv', R4),
+ ('stepping_angle_along_line_of_vis', R4),
+ ('sampling_angle_along_pixel_of_ir1', R4),
+ ('sampling_angle_along_pixel_of_ir2', R4),
+ ('sampling_angle_along_pixel_of_wv', R4),
+ ('sampling_angle_along_pixel_of_vis', R4),
+ ('x_component_vissr_misalignment', R4),
+ ('y_component_vissr_misalignment', R4)])
+])
+
+ATTITUDE_PREDICTION_DATA = np.dtype([('prediction_time_mjd', R8),
+ ('prediction_time_utc', TIME),
+ ('right_ascension_of_attitude', R8),
+ ('declination_of_attitude', R8),
+ ('sun_earth_angle', R8),
+ ('spin_rate', R8),
+ ('right_ascension_of_orbital_plane', R8),
+ ('declination_of_orbital_plane', R8),
+ ('reserved', R8),
+ ('eclipse_flag', I4),
+ ('spin_axis_flag', I4)])
+
+ATTITUDE_PREDICTION = np.dtype([('data_segment', I4),
+ ('data_validity', I4),
+ ('data_generation_time', TIME),
+ ('start_time', R8),
+ ('end_time', R8),
+ ('prediction_interval_time', R8),
+ ('number_of_prediction', I4),
+ ('data_size', I4),
+ ('data', ATTITUDE_PREDICTION_DATA, (33,))])
+
+ORBIT_PREDICTION_DATA = [('prediction_time_mjd', R8),
+ ('prediction_time_utc', TIME),
+ ('satellite_position_1950', R8, (3,)),
+ ('satellite_velocity_1950', R8, (3,)),
+ ('satellite_position_earth_fixed', R8, (3,)),
+ ('satellite_velocity_earth_fixed', R8, (3,)),
+ ('greenwich_sidereal_time', R8),
+ ('sat_sun_vector_1950', [('azimuth', R8),
+ ('elevation', R8)]),
+ ('sat_sun_vector_earth_fixed', [('azimuth', R8),
+ ('elevation', R8)]),
+ ('conversion_matrix', R8, (3, 3)),
+ ('moon_directional_vector', R8, (3,)),
+ ('satellite_position', [('ssp_longitude', R8),
+ ('ssp_latitude', R8),
+ ('satellite_height', R8)]),
+ ('eclipse_period_flag', I4),
+ ('reserved', I4)]
+
+ORBIT_PREDICTION = np.dtype([('data_segment', I4),
+ ('data_validity', I4),
+ ('data_generation_time', TIME),
+ ('start_time', R8),
+ ('end_time', R8),
+ ('prediction_interval_time', R8),
+ ('number_of_prediction', I4),
+ ('data_size', I4),
+ ('data', ORBIT_PREDICTION_DATA, (9,))])
+
+VIS_CALIBRATION_TABLE = np.dtype([
+ ('channel_number', I4),
+ ('data_validity', I4),
+ ('updated_time', TIME),
+ ('table_id', I4),
+ ('brightness_albedo_conversion_table', R4, (64,)),
+ ('vis_channel_staircase_brightness_data', R4, (6,)),
+ ('coefficients_table_of_vis_staircase_regression_curve', R4, (10,)),
+ ('brightness_table_for_calibration', [('universal_space_brightness', R4),
+ ('solar_brightness', R4)]),
+ ('calibration_uses_brightness_correspondence_voltage_chart', [('universal_space_voltage', R4),
+ ('solar_voltage', R4)]),
+ ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]),
+ ('reserved', I4, (9,))
+ ])
+
+VIS_CALIBRATION = np.dtype([('data_segment', I4),
+ ('data_validity', I4),
+ ('data_generation_time', TIME),
+ ('sensor_group', I4),
+ ('vis1_calibration_table', VIS_CALIBRATION_TABLE),
+ ('vis2_calibration_table', VIS_CALIBRATION_TABLE),
+ ('vis3_calibration_table', VIS_CALIBRATION_TABLE),
+ ('reserved', I4, (267,))])
+
+TELEMETRY_DATA = np.dtype([
+ ('shutter_temp', R4),
+ ('redundant_mirror_temp', R4),
+ ('primary_mirror_temp', R4),
+ ('baffle_fw_temp', R4),
+ ('baffle_af_temp', R4),
+ ('15_volt_auxiliary_power_supply', R4),
+ ('radiative_cooler_temp_1', R4),
+ ('radiative_cooler_temp_2', R4),
+ ('electronics_module_temp', R4),
+ ('scan_mirror_temp', R4),
+ ('shutter_cavity_temp', R4),
+ ('primary_mirror_sealed_temp', R4),
+ ('redundant_mirror_sealed_temp', R4),
+ ('shutter_temp_2', R4),
+ ('reserved', R4, (2,))
+])
+
+IR_CALIBRATION = np.dtype([
+ ('data_segment', I4),
+ ('data_validity', I4),
+ ('updated_time', TIME),
+ ('sensor_group', I4),
+ ('table_id', I4),
+ ('reserved_1', I4, (2,)),
+ ('conversion_table_of_equivalent_black_body_radiation', R4, (256,)),
+ ('conversion_table_of_equivalent_black_body_temperature', R4, (256,)),
+ ('staircase_brightness_data', R4, (6,)),
+ ('coefficients_table_of_staircase_regression_curve', R4, (10,)),
+ ('brightness_data_for_calibration', [('brightness_of_space', R4),
+ ('brightness_of_black_body_shutter', R4),
+ ('reserved', R4)]),
+ ('voltage_table_for_brightness_of_calibration', [('voltage_of_space', R4),
+ ('voltage_of_black_body_shutter', R4),
+ ('reserved', R4)]),
+ ('calibration_coefficients_of_radiation_observation', [('G', R4), ('V0', R4)]),
+ ('valid_shutter_temperature', R4),
+ ('valid_shutter_radiation', R4),
+ ('telemetry_data_table', TELEMETRY_DATA),
+ ('flag_of_calid_shutter_temperature_calculation', I4),
+ ('reserved_2', I4, (109,))
+])
+
+SIMPLE_COORDINATE_CONVERSION_TABLE = np.dtype([
+ ('coordinate_conversion_table', I2, (1250,)),
+ ('earth_equator_radius', R4),
+ ('satellite_height', R4),
+ ('stepping_angle', R4),
+ ('sampling_angle', R4),
+ ('ssp_latitude', R4),
+ ('ssp_longitude', R4),
+ ('ssp_line_number', R4),
+ ('ssp_pixel_number', R4),
+ ('pi', R4),
+ ('line_correction_ir1_vis', R4),
+ ('pixel_correction_ir1_vis', R4),
+ ('line_correction_ir1_ir2', R4),
+ ('pixel_correction_ir1_ir2', R4),
+ ('line_correction_ir1_wv', R4),
+ ('pixel_correction_ir1_wv', R4),
+ ('reserved', R4, (32,)),
+])
+
+IMAGE_PARAMS = {
+ 'mode': {
+ 'dtype': MODE_BLOCK,
+ 'offset': {
+ VIS_CHANNEL: 2 * BLOCK_SIZE_VIS,
+ IR_CHANNEL: 2 * BLOCK_SIZE_IR
+ }
+ },
+ 'coordinate_conversion': {
+ 'dtype': COORDINATE_CONVERSION_PARAMETERS,
+ 'offset': {
+ VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE,
+ IR_CHANNEL: 4 * BLOCK_SIZE_IR
+ }
+ },
+ 'attitude_prediction': {
+ 'dtype': ATTITUDE_PREDICTION,
+ 'offset': {
+ VIS_CHANNEL: 2 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE,
+ IR_CHANNEL: 5 * BLOCK_SIZE_IR
+ },
+ 'preserve': 'data'
+ },
+ 'orbit_prediction_1': {
+ 'dtype': ORBIT_PREDICTION,
+ 'offset': {
+ VIS_CHANNEL: 3 * BLOCK_SIZE_VIS,
+ IR_CHANNEL: 6 * BLOCK_SIZE_IR
+ },
+ 'preserve': 'data'
+ },
+ 'orbit_prediction_2': {
+ 'dtype': ORBIT_PREDICTION,
+ 'offset': {
+ VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 1 * IMAGE_PARAM_ITEM_SIZE,
+ IR_CHANNEL: 7 * BLOCK_SIZE_IR
+ },
+ 'preserve': 'data'
+ },
+ 'vis_calibration': {
+ 'dtype': VIS_CALIBRATION,
+ 'offset': {
+ VIS_CHANNEL: 3 * BLOCK_SIZE_VIS + 3 * IMAGE_PARAM_ITEM_SIZE,
+ IR_CHANNEL: 9 * BLOCK_SIZE_IR
+ },
+ 'preserve': 'data'
+ },
+ 'ir1_calibration': {
+ 'dtype': IR_CALIBRATION,
+ 'offset': {
+ VIS_CHANNEL: 4 * BLOCK_SIZE_VIS,
+ IR_CHANNEL: 10 * BLOCK_SIZE_IR
+ },
+ },
+ 'ir2_calibration': {
+ 'dtype': IR_CALIBRATION,
+ 'offset': {
+ VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + IMAGE_PARAM_ITEM_SIZE,
+ IR_CHANNEL: 11 * BLOCK_SIZE_IR
+ },
+ },
+ 'wv_calibration': {
+ 'dtype': IR_CALIBRATION,
+ 'offset': {
+ VIS_CHANNEL: 4 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE,
+ IR_CHANNEL: 12 * BLOCK_SIZE_IR
+ },
+ },
+ 'simple_coordinate_conversion_table': {
+ 'dtype': SIMPLE_COORDINATE_CONVERSION_TABLE,
+ 'offset': {
+ VIS_CHANNEL: 5 * BLOCK_SIZE_VIS + 2 * IMAGE_PARAM_ITEM_SIZE,
+ IR_CHANNEL: 16 * BLOCK_SIZE_IR
+ },
+ }
+}
+
+LINE_CONTROL_WORD = np.dtype([
+ ('data_id', U1, (4, )),
+ ('line_number', I4),
+ ('line_name', I4),
+ ('error_line_flag', I4),
+ ('error_message', I4),
+ ('mode_error_flag', I4),
+ ('scan_time', R8),
+ ('beta_angle', R4),
+ ('west_side_earth_edge', I4),
+ ('east_side_earth_edge', I4),
+ ('received_time_1', R8), # Typo in format description (I*4)
+ ('received_time_2', I4),
+ ('reserved', U1, (8, ))
+])
+
+IMAGE_DATA_BLOCK_IR = np.dtype([('LCW', LINE_CONTROL_WORD),
+ ('DOC', U1, (256,)), # Omitted
+ ('image_data', U1, 3344)])
+
+IMAGE_DATA_BLOCK_VIS = np.dtype([('LCW', LINE_CONTROL_WORD),
+ ('DOC', U1, (64,)), # Omitted
+ ('image_data', U1, (13376,))])
+
+IMAGE_DATA = {
+ VIS_CHANNEL: {
+ 'offset': 6 * BLOCK_SIZE_VIS,
+ 'dtype': IMAGE_DATA_BLOCK_VIS,
+ },
+ IR_CHANNEL: {
+ 'offset': 18 * BLOCK_SIZE_IR,
+ 'dtype': IMAGE_DATA_BLOCK_IR
+ }
+}
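+
+# Usage sketch (illustrative): the structured dtypes above are meant to be
+# read with np.frombuffer, e.g. for the mode block of an IR file:
+#
+# with open(filename, "rb") as f:
+# f.seek(IMAGE_PARAMS["mode"]["offset"][IR_CHANNEL])
+# buf = f.read(MODE_BLOCK.itemsize)
+# mode_block = np.frombuffer(buf, dtype=MODE_BLOCK, count=1)[0]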
diff --git a/satpy/readers/gms/gms5_vissr_l1b.py b/satpy/readers/gms/gms5_vissr_l1b.py
new file mode 100644
index 0000000000..f3c6898f65
--- /dev/null
+++ b/satpy/readers/gms/gms5_vissr_l1b.py
@@ -0,0 +1,803 @@
+"""Reader for GMS-5 VISSR Level 1B data.
+
+Introduction
+------------
+The ``gms5_vissr_l1b`` reader can decode, navigate and calibrate Level 1B data
+from the Visible and Infrared Spin Scan Radiometer (VISSR) in `VISSR
+archive format`. Corresponding platforms are GMS-5 (Japanese Geostationary
+Meteorological Satellite) and GOES-09 (2003-2006 backup after MTSAT-1 launch
+failure).
+
+VISSR has four channels, each stored in a separate file:
+
+.. code-block:: none
+
+ VISSR_20020101_0031_IR1.A.IMG
+ VISSR_20020101_0031_IR2.A.IMG
+ VISSR_20020101_0031_IR3.A.IMG
+ VISSR_20020101_0031_VIS.A.IMG
+
+This is how to read them with Satpy:
+
+.. code-block:: python
+
+ from satpy import Scene
+ import glob
+
+ filenames = glob.glob(""/data/VISSR*")
+ scene = Scene(filenames, reader="gms5-vissr_l1b")
+ scene.load(["VIS", "IR1"])
+
+
+References
+~~~~~~~~~~
+
+Details about platform, instrument and data format can be found in the
+following references:
+
+ - `VISSR Format Description`_
+ - `GMS User Guide`_
+
+.. _VISSR Format Description:
+ https://www.data.jma.go.jp/mscweb/en/operation/fig/VISSR_FORMAT_GMS-5.pdf
+.. _GMS User Guide:
+ https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf
+
+
+Compression
+-----------
+
+Gzip-compressed VISSR files can be decompressed on the fly using
+:class:`~satpy.readers.FSFile`:
+
+.. code-block:: python
+
+ import fsspec
+ from satpy import Scene
+ from satpy.readers import FSFile
+
+ filename = "VISSR_19960217_2331_IR1.A.IMG.gz"
+ open_file = fsspec.open(filename, compression="gzip")
+ fs_file = FSFile(open_file)
+ scene = Scene([fs_file], reader="gms5-vissr_l1b")
+ scene.load(["IR1"])
+
+
+Calibration
+-----------
+
+Sensor counts are calibrated by looking up reflectance/temperature values in the
+calibration tables included in each file. See section 2.2 in the VISSR user
+guide.
+
+
+Navigation
+----------
+
+VISSR images are oversampled and not rectified.
+
+
+Oversampling
+~~~~~~~~~~~~
+VISSR oversamples the viewed scene in E-W direction by a factor of ~1.46:
+IR/VIS pixels are 14/3.5 urad on a side, but the instrument samples every
+9.57/2.39 urad in E-W direction. That means pixels are actually overlapping on
+the ground.
+
+This cannot be represented by a pyresample area definition, so each dataset
+is accompanied by 2-dimensional longitude and latitude coordinates. For
+resampling purposes, a full disc area definition with uniform sampling is
+provided via
+
+.. code-block:: python
+
+ scene[dataset].attrs["area_def_uniform_sampling"]
+
+
+Rectification
+~~~~~~~~~~~~~
+
+VISSR images are not rectified. That means lon/lat coordinates are different
+
+1) for all channels of the same repeat cycle, even if their spatial resolution
+ is identical (IR channels)
+2) for different repeat cycles, even if the channel is identical
+
+However, the above area definition uses the nominal subsatellite point as
+projection center. As this rarely changes, the area definition is fairly
+constant over time.
+
+
+Performance
+~~~~~~~~~~~
+
+Navigation of VISSR images is computationally expensive, because for each pixel
+the view vector of the (rotating) instrument needs to be intersected with the
+earth, including interpolation of attitude and orbit prediction. For IR channels
+this takes about 10 seconds, for VIS channels about 160 seconds.
+
+
+Space Pixels
+------------
+
+VISSR produces data for pixels outside the Earth disk (i.e. atmospheric limb or
+deep space pixels). By default, these pixels are masked out as they contain
+data of limited or no value, but some applications do require these pixels.
+To turn off masking, set ``mask_space=False`` upon scene creation:
+
+.. code-block:: python
+
+ import satpy
+ import glob
+
+ filenames = glob.glob("VISSR*.IMG")
+ scene = satpy.Scene(filenames,
+ reader="gms5-vissr_l1b",
+ reader_kwargs={"mask_space": False})
+ scene.load(["VIS", "IR1])
+
+
+Metadata
+--------
+
+Dataset attributes include metadata such as time and orbital parameters,
+see :ref:`dataset_metadata`.
+
+Partial Scans
+-------------
+
+Between 2001 and 2003 VISSR also recorded partial scans of the northern
+hemisphere. On demand a special Typhoon schedule would be activated between
+03:00 and 05:00 UTC.
+"""
+
+import datetime as dt
+
+import dask.array as da
+import numba
+import numpy as np
+import xarray as xr
+
+import satpy.readers._geos_area as geos_area
+import satpy.readers.gms.gms5_vissr_format as fmt
+import satpy.readers.gms.gms5_vissr_navigation as nav
+from satpy.readers.file_handlers import BaseFileHandler
+from satpy.readers.hrit_jma import mjd2datetime64
+from satpy.readers.utils import generic_open
+from satpy.utils import get_legacy_chunk_size
+
+CHUNK_SIZE = get_legacy_chunk_size()
+
+
+def _recarr2dict(arr, preserve=None):
+ if not preserve:
+ preserve = []
+ res = {}
+ for key, value in zip(arr.dtype.names, arr):
+ if key.startswith("reserved"):
+ continue
+ if value.dtype.names and key not in preserve:
+ # Nested record array
+ res[key] = _recarr2dict(value)
+ else:
+ # Scalar or record array that shall be preserved
+ res[key] = value
+ return res
+
+
+class GMS5VISSRFileHandler(BaseFileHandler):
+ """File handler for GMS-5 VISSR data in VISSR archive format."""
+
+ def __init__(self, filename, filename_info, filetype_info, mask_space=True):
+ """Initialize the file handler.
+
+ Args:
+ filename: Name of file to be read
+ filename_info: Information obtained from filename
+ filetype_info: Information about file type
+ mask_space: Mask space pixels.
+ """
+ super(GMS5VISSRFileHandler, self).__init__(
+ filename, filename_info, filetype_info
+ )
+ self._filename = filename
+ self._filename_info = filename_info
+ self._header, self._channel_type = self._read_header(filename)
+ self._mda = self._get_mda()
+ self._mask_space = mask_space
+
+ def _read_header(self, filename):
+ header = {}
+ with generic_open(filename, mode="rb") as file_obj:
+ header["control_block"] = self._read_control_block(file_obj)
+ channel_type = self._get_channel_type(
+ header["control_block"]["parameter_block_size"]
+ )
+ header["image_parameters"] = self._read_image_params(file_obj, channel_type)
+ return header, channel_type
+
+ @staticmethod
+ def _get_channel_type(parameter_block_size):
+ if parameter_block_size == 4:
+ return fmt.VIS_CHANNEL
+ elif parameter_block_size == 16:
+ return fmt.IR_CHANNEL
+ raise ValueError(
+ f"Cannot determine channel type, possibly corrupt file "
+ f"(unknown parameter block size: {parameter_block_size})"
+ )
+
+ def _read_control_block(self, file_obj):
+ ctrl_block = read_from_file_obj(file_obj, dtype=fmt.CONTROL_BLOCK, count=1)
+ return _recarr2dict(ctrl_block[0])
+
+ def _read_image_params(self, file_obj, channel_type):
+ """Read image parameters from the header."""
+ image_params = {}
+ for name, param in fmt.IMAGE_PARAMS.items():
+ image_params[name] = self._read_image_param(file_obj, param, channel_type)
+
+ image_params["orbit_prediction"] = self._concat_orbit_prediction(
+ image_params.pop("orbit_prediction_1"),
+ image_params.pop("orbit_prediction_2"),
+ )
+ return image_params
+
+ @staticmethod
+ def _read_image_param(file_obj, param, channel_type):
+ """Read a single image parameter block from the header."""
+ image_params = read_from_file_obj(
+ file_obj,
+ dtype=param["dtype"],
+ count=1,
+ offset=param["offset"][channel_type],
+ )
+ return _recarr2dict(image_params[0], preserve=param.get("preserve"))
+
+ @staticmethod
+ def _concat_orbit_prediction(orb_pred_1, orb_pred_2):
+ """Concatenate orbit prediction data.
+
+ It is split over two image parameter blocks in the header.
+ """
+ orb_pred = orb_pred_1
+ orb_pred["data"] = np.concatenate([orb_pred_1["data"], orb_pred_2["data"]])
+ return orb_pred
+
+ def _get_frame_parameters_key(self):
+ if self._channel_type == fmt.VIS_CHANNEL:
+ return "vis_frame_parameters"
+ return "ir_frame_parameters"
+
+ def _get_actual_shape(self):
+ actual_num_lines = self._header["control_block"][
+ "available_block_size_of_image_data"
+ ]
+ _, nominal_num_pixels = self._get_nominal_shape()
+ return actual_num_lines, nominal_num_pixels
+
+ def _get_nominal_shape(self):
+ frame_params = self._header["image_parameters"]["mode"][
+ self._get_frame_parameters_key()
+ ]
+ return frame_params["number_of_lines"], frame_params["number_of_pixels"]
+
+ def _get_mda(self):
+ return {
+ "platform": self._mode_block["satellite_name"].decode().strip().upper(),
+ "sensor": "VISSR",
+ "time_parameters": self._get_time_parameters(),
+ "orbital_parameters": self._get_orbital_parameters(),
+ }
+
+ def _get_orbital_parameters(self):
+ # Note: SSP longitude in simple coordinate conversion table seems to be
+ # incorrect (80 deg instead of 140 deg). Use orbital parameters instead.
+ im_params = self._header["image_parameters"]
+ mode = im_params["mode"]
+ simple_coord = im_params["simple_coordinate_conversion_table"]
+ orb_params = im_params["coordinate_conversion"]["orbital_parameters"]
+ return {
+ "satellite_nominal_longitude": mode["ssp_longitude"],
+ "satellite_nominal_latitude": 0.0,
+ "satellite_nominal_altitude": mode["satellite_height"],
+ "satellite_actual_longitude": orb_params["longitude_of_ssp"],
+ "satellite_actual_latitude": orb_params["latitude_of_ssp"],
+ "satellite_actual_altitude": simple_coord["satellite_height"],
+ }
+
+ def _get_time_parameters(self):
+ start_time = mjd2datetime64(self._mode_block["observation_time_mjd"])
+ start_time = start_time.astype(dt.datetime).replace(second=0, microsecond=0)
+ end_time = start_time + dt.timedelta(
+ minutes=25
+ ) # Source: GMS User Guide, section 3.3.1
+ return {
+ "nominal_start_time": start_time,
+ "nominal_end_time": end_time,
+ }
+
+ def get_dataset(self, dataset_id, ds_info):
+ """Get dataset from file."""
+ image_data = self._get_image_data()
+ counts = self._get_counts(image_data)
+ dataset = self._calibrate(counts, dataset_id)
+ space_masker = SpaceMasker(image_data, dataset_id["name"])
+ dataset = self._mask_space_pixels(dataset, space_masker)
+ self._attach_lons_lats(dataset, dataset_id)
+ self._update_attrs(dataset, dataset_id, ds_info)
+ return dataset
+
+ def _get_image_data(self):
+ image_data = self._read_image_data()
+ return da.from_array(image_data, chunks=(CHUNK_SIZE,))
+
+ def _read_image_data(self):
+ num_lines, _ = self._get_actual_shape()
+ specs = self._get_image_data_type_specs()
+ with generic_open(self._filename, "rb") as file_obj:
+ return read_from_file_obj(
+ file_obj, dtype=specs["dtype"], count=num_lines, offset=specs["offset"]
+ )
+
+ def _get_image_data_type_specs(self):
+ return fmt.IMAGE_DATA[self._channel_type]
+
+ def _get_counts(self, image_data):
+ return self._make_counts_data_array(image_data)
+
+ def _make_counts_data_array(self, image_data):
+ return xr.DataArray(
+ image_data["image_data"],
+ dims=("y", "x"),
+ coords={
+ "acq_time": ("y", self._get_acq_time(image_data)),
+ "line_number": ("y", self._get_line_number(image_data)),
+ },
+ )
+
+ def _get_acq_time(self, dask_array):
+ acq_time = dask_array["LCW"]["scan_time"].compute()
+ return mjd2datetime64(acq_time)
+
+ def _get_line_number(self, dask_array):
+ return dask_array["LCW"]["line_number"].compute()
+
+ def _calibrate(self, counts, dataset_id):
+ table = self._get_calibration_table(dataset_id)
+ cal = Calibrator(table)
+ return cal.calibrate(counts, dataset_id["calibration"])
+
+ def _get_calibration_table(self, dataset_id):
+ tables = {
+ "VIS": self._header["image_parameters"]["vis_calibration"][
+ "vis1_calibration_table"
+ ]["brightness_albedo_conversion_table"],
+ "IR1": self._header["image_parameters"]["ir1_calibration"][
+ "conversion_table_of_equivalent_black_body_temperature"
+ ],
+ "IR2": self._header["image_parameters"]["ir2_calibration"][
+ "conversion_table_of_equivalent_black_body_temperature"
+ ],
+ "IR3": self._header["image_parameters"]["wv_calibration"][
+ "conversion_table_of_equivalent_black_body_temperature"
+ ],
+ }
+ return tables[dataset_id["name"]]
+
+ def _get_area_def_uniform_sampling(self, dataset_id):
+ a = AreaDefEstimator(
+ coord_conv_params=self._header["image_parameters"]["coordinate_conversion"],
+ metadata=self._mda,
+ )
+ return a.get_area_def_uniform_sampling(dataset_id)
+
+ def _mask_space_pixels(self, dataset, space_masker):
+ if self._mask_space:
+ return space_masker.mask_space(dataset)
+ return dataset
+
+ def _attach_lons_lats(self, dataset, dataset_id):
+ lons, lats = self._get_lons_lats(dataset, dataset_id)
+ dataset.coords["lon"] = lons
+ dataset.coords["lat"] = lats
+
+ def _get_lons_lats(self, dataset, dataset_id):
+ lines, pixels = self._get_image_coords(dataset)
+ nav_params = self._get_navigation_parameters(dataset_id)
+ lons, lats = nav.get_lons_lats(lines, pixels, nav_params)
+ return self._make_lons_lats_data_array(lons, lats)
+
+ def _get_image_coords(self, data):
+ lines = data.coords["line_number"].values
+ pixels = np.arange(data.shape[1])
+ return lines.astype(np.float64), pixels.astype(np.float64)
+
+ def _get_navigation_parameters(self, dataset_id):
+ return nav.ImageNavigationParameters(
+ static=self._get_static_navigation_params(dataset_id),
+ predicted=self._get_predicted_navigation_params()
+ )
+
+ def _get_static_navigation_params(self, dataset_id):
+ """Get static navigation parameters.
+
+ Note that, "central_line_number_of_vissr_frame" is different for each
+ channel, even if their spatial resolution is identical. For example:
+
+ VIS: 5513.0
+ IR1: 1378.5
+ IR2: 1378.7
+ IR3: 1379.1001
+ """
+ alt_ch_name = _get_alternative_channel_name(dataset_id)
+ scan_params = nav.ScanningParameters(
+ start_time_of_scan=self._coord_conv["scheduled_observation_time"],
+ spinning_rate=self._mode_block["spin_rate"],
+ num_sensors=self._coord_conv["number_of_sensor_elements"][alt_ch_name],
+ sampling_angle=self._coord_conv["sampling_angle_along_pixel"][alt_ch_name],
+ )
+ proj_params = self._get_proj_params(dataset_id)
+ return nav.StaticNavigationParameters(
+ proj_params=proj_params,
+ scan_params=scan_params
+ )
+
+ def _get_proj_params(self, dataset_id):
+ proj_params = nav.ProjectionParameters(
+ image_offset=self._get_image_offset(dataset_id),
+ scanning_angles=self._get_scanning_angles(dataset_id),
+ earth_ellipsoid=self._get_earth_ellipsoid()
+ )
+ return proj_params
+
+ def _get_earth_ellipsoid(self):
+ # Use earth radius and flattening from JMA's Msial library, because
+ # the values in the data seem to be pretty old. For example the
+ # equatorial radius is from the Bessel Ellipsoid (1841).
+ return nav.EarthEllipsoid(
+ flattening=nav.EARTH_FLATTENING,
+ equatorial_radius=nav.EARTH_EQUATORIAL_RADIUS,
+ )
+
+ def _get_scanning_angles(self, dataset_id):
+ alt_ch_name = _get_alternative_channel_name(dataset_id)
+ misalignment = np.ascontiguousarray(
+ self._coord_conv["matrix_of_misalignment"].transpose().astype(np.float64)
+ )
+ return nav.ScanningAngles(
+ stepping_angle=self._coord_conv["stepping_angle_along_line"][alt_ch_name],
+ sampling_angle=self._coord_conv["sampling_angle_along_pixel"][
+ alt_ch_name],
+ misalignment=misalignment
+ )
+
+ def _get_image_offset(self, dataset_id):
+ alt_ch_name = _get_alternative_channel_name(dataset_id)
+ center_line_vissr_frame = self._coord_conv["central_line_number_of_vissr_frame"][
+ alt_ch_name
+ ]
+ center_pixel_vissr_frame = self._coord_conv["central_pixel_number_of_vissr_frame"][
+ alt_ch_name
+ ]
+ pixel_offset = self._coord_conv[
+ "pixel_difference_of_vissr_center_from_normal_position"
+ ][alt_ch_name]
+ return nav.ImageOffset(
+ line_offset=center_line_vissr_frame,
+ pixel_offset=center_pixel_vissr_frame + pixel_offset,
+ )
+
+ def _get_predicted_navigation_params(self):
+ """Get predictions of time-dependent navigation parameters."""
+ attitude_prediction = self._get_attitude_prediction()
+ orbit_prediction = self._get_orbit_prediction()
+ return nav.PredictedNavigationParameters(
+ attitude=attitude_prediction,
+ orbit=orbit_prediction
+ )
+
+ def _get_attitude_prediction(self):
+ att_pred = self._header["image_parameters"]["attitude_prediction"]["data"]
+ attitudes = nav.Attitude(
+ angle_between_earth_and_sun=att_pred["sun_earth_angle"].astype(
+ np.float64),
+ angle_between_sat_spin_and_z_axis=att_pred[
+ "right_ascension_of_attitude"
+ ].astype(np.float64),
+ angle_between_sat_spin_and_yz_plane=att_pred[
+ "declination_of_attitude"
+ ].astype(np.float64),
+ )
+ attitude_prediction = nav.AttitudePrediction(
+ prediction_times=att_pred["prediction_time_mjd"].astype(np.float64),
+ attitude=attitudes
+ )
+ return attitude_prediction
+
+ def _get_orbit_prediction(self):
+ orb_pred = self._header["image_parameters"]["orbit_prediction"]["data"]
+ orbit_angles = nav.OrbitAngles(
+ greenwich_sidereal_time=np.deg2rad(
+ orb_pred["greenwich_sidereal_time"].astype(np.float64)
+ ),
+ declination_from_sat_to_sun=np.deg2rad(
+ orb_pred["sat_sun_vector_earth_fixed"]["elevation"].astype(np.float64)
+ ),
+ right_ascension_from_sat_to_sun=np.deg2rad(
+ orb_pred["sat_sun_vector_earth_fixed"]["azimuth"].astype(np.float64)
+ ),
+ )
+ sat_position = nav.Satpos(
+ x=orb_pred["satellite_position_earth_fixed"][:, 0].astype(np.float64),
+ y=orb_pred["satellite_position_earth_fixed"][:, 1].astype(np.float64),
+ z=orb_pred["satellite_position_earth_fixed"][:, 2].astype(np.float64),
+ )
+ orbit_prediction = nav.OrbitPrediction(
+ prediction_times=orb_pred["prediction_time_mjd"].astype(np.float64),
+ angles=orbit_angles,
+ sat_position=sat_position,
+ nutation_precession=np.ascontiguousarray(
+ orb_pred["conversion_matrix"].transpose(0, 2, 1).astype(np.float64)
+ ),
+ )
+ return orbit_prediction
+
+ def _make_lons_lats_data_array(self, lons, lats):
+ lons = xr.DataArray(
+ lons,
+ dims=("y", "x"),
+ attrs={"standard_name": "longitude", "units": "degrees_east"},
+ )
+ lats = xr.DataArray(
+ lats,
+ dims=("y", "x"),
+ attrs={"standard_name": "latitude", "units": "degrees_north"},
+ )
+ return lons, lats
+
+ def _update_attrs(self, dataset, dataset_id, ds_info):
+ dataset.attrs.update(ds_info)
+ dataset.attrs.update(self._mda)
+ dataset.attrs[
+ "area_def_uniform_sampling"
+ ] = self._get_area_def_uniform_sampling(dataset_id)
+
+ @property
+ def start_time(self):
+ """Nominal start time of the dataset."""
+ return self._mda["time_parameters"]["nominal_start_time"]
+
+ @property
+ def end_time(self):
+ """Nominal end time of the dataset."""
+ return self._mda["time_parameters"]["nominal_end_time"]
+
+ @property
+ def _coord_conv(self):
+ return self._header["image_parameters"]["coordinate_conversion"]
+
+ @property
+ def _mode_block(self):
+ return self._header["image_parameters"]["mode"]
+
+
+def _get_alternative_channel_name(dataset_id):
+ return fmt.ALT_CHANNEL_NAMES[dataset_id["name"]]
+
+
+def read_from_file_obj(file_obj, dtype, count, offset=0):
+ """Read data from file object.
+
+ Args:
+ file_obj: An open file object.
+ dtype: Data type to be read.
+ count: Number of elements to be read.
+ offset: Byte offset where to start reading.
+ """
+ file_obj.seek(offset)
+ data = file_obj.read(dtype.itemsize * count)
+ return np.frombuffer(data, dtype=dtype, count=count)
+
+
+class Calibrator:
+ """Calibrate VISSR data to reflectance or brightness temperature.
+
+ Reference: Section 2.2 in the VISSR User Guide.
+ """
+
+ def __init__(self, calib_table):
+ """Initialize the calibrator.
+
+ Args:
+ calib_table: Calibration table
+ """
+ self._calib_table = calib_table
+
+ def calibrate(self, counts, calibration):
+ """Transform counts to given calibration level."""
+ if calibration == "counts":
+ return counts
+ res = self._calibrate(counts)
+ res = self._postproc(res, calibration)
+ return self._make_data_array(res, counts)
+
+ def _calibrate(self, counts):
+ return da.map_blocks(
+ self._lookup_calib_table,
+ counts.data,
+ calib_table=self._calib_table,
+ dtype=np.float32,
+ )
+
+ def _postproc(self, res, calibration):
+ if calibration == "reflectance":
+ res = self._convert_to_percent(res)
+ return res
+
+ def _convert_to_percent(self, res):
+ return res * 100
+
+ def _make_data_array(self, interp, counts):
+ return xr.DataArray(
+ interp,
+ dims=counts.dims,
+ coords=counts.coords,
+ )
+
+ def _lookup_calib_table(self, counts, calib_table):
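+ # Counts index directly into the calibration table; e.g. with an
+ # illustrative table np.array([200., 210., 220.]), a count of 2 maps
+ # to 220.0.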
+ return calib_table[counts]
+
+
+class SpaceMasker:
+ """Mask pixels outside the earth disk."""
+
+ _fill_value = -1 # scanline not intersecting the earth
+
+ def __init__(self, image_data, channel):
+ """Initialize the space masker.
+
+ Args:
+ image_data: Image data
+ channel: Channel name
+ """
+ self._image_data = image_data
+ self._channel = channel
+ self._shape = image_data["image_data"].shape
+ self._earth_mask = self._get_earth_mask()
+
+ def mask_space(self, dataset):
+ """Mask space pixels in the given dataset."""
+ return dataset.where(self._earth_mask).astype(np.float32)
+
+ def _get_earth_mask(self):
+ earth_edges = self._get_earth_edges()
+ return get_earth_mask(self._shape, earth_edges, self._fill_value)
+
+ def _get_earth_edges(self):
+ west_edges = self._get_earth_edges_per_scan_line("west_side_earth_edge")
+ east_edges = self._get_earth_edges_per_scan_line("east_side_earth_edge")
+ return west_edges, east_edges
+
+ def _get_earth_edges_per_scan_line(self, cardinal):
+ edges = self._image_data["LCW"][cardinal].compute().astype(np.int32)
+ if is_vis_channel(self._channel):
+ edges = self._correct_vis_edges(edges)
+ return edges
+
+ def _correct_vis_edges(self, edges):
+ """Correct VIS edges.
+
+ VIS data contains earth edges of IR channel. Compensate for that
+ by scaling with a factor of 4 (1 IR pixel ~ 4 VIS pixels).
+ """
+ return np.where(edges != self._fill_value, edges * 4, edges)
+
+
+@numba.njit
+def get_earth_mask(shape, earth_edges, fill_value=-1):
+ """Get binary mask where 1/0 indicates earth/space.
+
+ Args:
+ shape: Image shape
+ earth_edges: First and last earth pixel in each scanline
+ fill_value: Fill value for scanlines not intersecting the earth.
+ """
+ first_earth_pixels, last_earth_pixels = earth_edges
+ mask = np.zeros(shape, dtype=np.int8)
+ for line in range(shape[0]):
+ first = first_earth_pixels[line]
+ last = last_earth_pixels[line]
+ if first == fill_value or last == fill_value:
+ continue
+ mask[line, first:last+1] = 1
+ return mask
+
+
+def is_vis_channel(channel_name):
+ """Check if it's the visible channel."""
+ return channel_name == "VIS"
+
+
+class AreaDefEstimator:
+ """Estimate area definition for VISSR images."""
+
+ full_disk_size = {
+ "IR": 2366,
+ "VIS": 9464,
+ }
+
+ def __init__(self, coord_conv_params, metadata):
+ """Initialize the area definition estimator.
+
+ Args:
+ coord_conv_params: Coordinate conversion parameters
+ metadata: VISSR file metadata
+ """
+ self.coord_conv = coord_conv_params
+ self.metadata = metadata
+
+ def get_area_def_uniform_sampling(self, dataset_id):
+ """Get full disk area definition with uniform sampling.
+
+ Args:
+ dataset_id: ID of the corresponding dataset.
+ """
+ proj_dict = self._get_proj_dict(dataset_id)
+ extent = geos_area.get_area_extent(proj_dict)
+ return geos_area.get_area_definition(proj_dict, extent)
+
+ def _get_proj_dict(self, dataset_id):
+ proj_dict = {}
+ proj_dict.update(self._get_name_dict(dataset_id))
+ proj_dict.update(self._get_proj4_dict())
+ proj_dict.update(self._get_shape_dict(dataset_id))
+ return proj_dict
+
+ def _get_name_dict(self, dataset_id):
+ name_dict = geos_area.get_geos_area_naming(
+ {
+ "platform_name": self.metadata["platform"],
+ "instrument_name": self.metadata["sensor"],
+ "service_name": "western-pacific",
+ "service_desc": "Western Pacific",
+ "resolution": dataset_id["resolution"],
+ }
+ )
+ return {
+ "a_name": name_dict["area_id"],
+ "p_id": name_dict["area_id"],
+ "a_desc": name_dict["description"],
+ }
+
+ def _get_proj4_dict(
+ self,
+ ):
+ # Use nominal parameters to make the area def as constant as possible
+ return {
+ "ssp_lon": self.metadata["orbital_parameters"][
+ "satellite_nominal_longitude"
+ ],
+ "a": nav.EARTH_EQUATORIAL_RADIUS,
+ "b": nav.EARTH_POLAR_RADIUS,
+ "h": self.metadata["orbital_parameters"]["satellite_nominal_altitude"],
+ }
+
+ def _get_shape_dict(self, dataset_id):
+ # Apply sampling from the vertical dimension to the horizontal
+ # dimension to obtain a square area definition with uniform sampling.
+ ch_type = fmt.CHANNEL_TYPES[dataset_id["name"]]
+ alt_ch_name = _get_alternative_channel_name(dataset_id)
+ stepping_angle = self.coord_conv["stepping_angle_along_line"][alt_ch_name]
+ size = self.full_disk_size[ch_type]
+ line_pixel_offset = 0.5 * size
+ lfac_cfac = geos_area.sampling_to_lfac_cfac(stepping_angle)
+ return {
+ "nlines": size,
+ "ncols": size,
+ "lfac": lfac_cfac,
+ "cfac": lfac_cfac,
+ "coff": line_pixel_offset,
+ "loff": line_pixel_offset,
+ "scandir": "N2S",
+ }
diff --git a/satpy/readers/gms/gms5_vissr_navigation.py b/satpy/readers/gms/gms5_vissr_navigation.py
new file mode 100644
index 0000000000..8a811b2210
--- /dev/null
+++ b/satpy/readers/gms/gms5_vissr_navigation.py
@@ -0,0 +1,932 @@
+"""GMS-5 VISSR Navigation.
+
+Reference: `GMS User Guide`_, Appendix E, S-VISSR Mapping.
+
+.. _GMS User Guide:
+ https://www.data.jma.go.jp/mscweb/en/operation/fig/GMS_Users_Guide_3rd_Edition_Rev1.pdf
+"""
+
+from collections import namedtuple
+
+import dask.array as da
+import numba
+import numpy as np
+
+from satpy.utils import get_legacy_chunk_size
+
+CHUNK_SIZE = get_legacy_chunk_size()
+
+EARTH_FLATTENING = 1 / 298.257
+EARTH_EQUATORIAL_RADIUS = 6378136.0
+EARTH_POLAR_RADIUS = EARTH_EQUATORIAL_RADIUS * (1 - EARTH_FLATTENING)
+"""Constants taken from JMA's Msial library."""
+
+
+Pixel = namedtuple(
+ "Pixel",
+ ["line", "pixel"]
+)
+"""A VISSR pixel."""
+
+Vector2D = namedtuple(
+ "Vector2D",
+ ["x", "y"]
+)
+"""A 2D vector."""
+
+
+Vector3D = namedtuple(
+ "Vector3D",
+ ["x", "y", "z"]
+)
+"""A 3D vector."""
+
+
+Satpos = namedtuple(
+ "Satpos",
+ ["x", "y", "z"]
+)
+"""A 3D vector."""
+
+
+Attitude = namedtuple(
+ "Attitude",
+ [
+ "angle_between_earth_and_sun",
+ "angle_between_sat_spin_and_z_axis",
+ "angle_between_sat_spin_and_yz_plane",
+ ],
+)
+"""Attitude parameters.
+
+Units: radians
+"""
+
+
+Orbit = namedtuple(
+ "Orbit",
+ [
+ "angles",
+ "sat_position",
+ "nutation_precession",
+ ],
+)
+"""Orbital Parameters
+
+Args:
+ angles (OrbitAngles): Orbit angles
+ sat_position (Vector3D): Satellite position
+ nutation_precession: Nutation and precession matrix (3x3)
+"""
+
+
+OrbitAngles = namedtuple(
+ "OrbitAngles",
+ [
+ "greenwich_sidereal_time",
+ "declination_from_sat_to_sun",
+ "right_ascension_from_sat_to_sun",
+ ],
+)
+"""Orbit angles.
+
+Units: radians
+"""
+
+
+ImageNavigationParameters = namedtuple(
+ "ImageNavigationParameters",
+ ["static", "predicted"]
+)
+"""Navigation parameters for the entire image.
+
+Args:
+ static (StaticNavigationParameters): Static parameters.
+ predicted (PredictedNavigationParameters): Predicted time-dependent parameters.
+"""
+
+
+PixelNavigationParameters = namedtuple(
+ "PixelNavigationParameters",
+ ["attitude", "orbit", "proj_params"]
+)
+"""Navigation parameters for a single pixel.
+
+Args:
+ attitude (Attitude): Attitude parameters
+ orbit (Orbit): Orbit parameters
+ proj_params (ProjectionParameters): Projection parameters
+"""
+
+
+StaticNavigationParameters = namedtuple(
+ "StaticNavigationParameters",
+ [
+ "proj_params",
+ "scan_params"
+ ]
+)
+"""Navigation parameters which are constant for the entire scan.
+
+Args:
+ proj_params (ProjectionParameters): Projection parameters
+ scan_params (ScanningParameters): Scanning parameters
+"""
+
+
+PredictedNavigationParameters = namedtuple(
+ "PredictedNavigationParameters",
+ [
+ "attitude",
+ "orbit"
+ ]
+)
+"""Predictions of time-dependent navigation parameters.
+
+They need to be evaluated for each pixel.
+
+Args:
+ attitude (AttitudePrediction): Attitude prediction
+ orbit (OrbitPrediction): Orbit prediction
+"""
+
+
+ScanningParameters = namedtuple(
+ "ScanningParameters",
+ [
+ "start_time_of_scan",
+ "spinning_rate",
+ "num_sensors",
+ "sampling_angle"
+ ],
+)
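+"""Scanning parameters.
+
+Args:
+    start_time_of_scan: Start time of the scan
+    spinning_rate: Satellite spinning rate
+    num_sensors: Number of sensor elements per scan
+    sampling_angle: Sampling angle along pixel (rad)
+"""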
+
+
+ProjectionParameters = namedtuple(
+ "ProjectionParameters",
+ [
+ "image_offset",
+ "scanning_angles",
+ "earth_ellipsoid",
+ ],
+)
+"""Projection parameters.
+
+Args:
+ image_offset (ImageOffset): Image offset
+ scanning_angles (ScanningAngles): Scanning angles
+ earth_ellipsoid (EarthEllipsoid): Earth ellipsoid
+"""
+
+
+ImageOffset = namedtuple(
+ "ImageOffset",
+ [
+ "line_offset",
+ "pixel_offset",
+ ]
+)
+"""Image offset
+
+Args:
+ line_offset: Line offset from image center
+ pixel_offset: Pixel offset from image center
+"""
+
+
+ScanningAngles = namedtuple(
+ "ScanningAngles",
+ [
+ "stepping_angle",
+ "sampling_angle",
+ "misalignment"
+ ]
+)
+"""Scanning angles
+
+Args:
+ stepping_angle: Scanning angle along line (rad)
+ sampling_angle: Scanning angle along pixel (rad)
+ misalignment: Misalignment matrix (3x3)
+"""
+
+
+EarthEllipsoid = namedtuple(
+ "EarthEllipsoid",
+ [
+ "flattening",
+ "equatorial_radius"
+ ]
+)
+"""Earth ellipsoid.
+
+Args:
+ flattening: Ellipsoid flattening
+ equatorial_radius: Equatorial radius (meters)
+"""
+
+
+_AttitudePrediction = namedtuple(
+ "_AttitudePrediction",
+ [
+ "prediction_times",
+ "attitude"
+ ],
+)
+
+
+_OrbitPrediction = namedtuple(
+ "_OrbitPrediction",
+ [
+ "prediction_times",
+ "angles",
+ "sat_position",
+ "nutation_precession",
+ ],
+)
+
+
+class AttitudePrediction:
+ """Attitude prediction.
+
+ Use .to_numba() to pass this object to jitted methods. This extra
+ layer avoids usage of jitclasses and having to re-implement np.unwrap in
+ numba.
+ """
+
+ def __init__(
+ self,
+ prediction_times,
+ attitude
+ ):
+ """Initialize attitude prediction.
+
+ In order to accelerate interpolation, the 2-pi periodicity of angles
+ is unwrapped here already (that means phase jumps greater than pi
+ are wrapped to their 2*pi complement).
+
+ Args:
+ prediction_times: Timestamps of predicted attitudes
+ attitude (Attitude): Attitudes at prediction times
+ """
+ self.prediction_times = prediction_times
+ self.attitude = self._unwrap_angles(attitude)
+
+ def _unwrap_angles(self, attitude):
+ return Attitude(
+ np.unwrap(attitude.angle_between_earth_and_sun),
+ np.unwrap(attitude.angle_between_sat_spin_and_z_axis),
+ np.unwrap(attitude.angle_between_sat_spin_and_yz_plane),
+ )
+
+ def to_numba(self):
+ """Convert to numba-compatible type."""
+ return _AttitudePrediction(
+ prediction_times=self.prediction_times,
+ attitude=self.attitude
+ )
+
+
+class OrbitPrediction:
+ """Orbit prediction.
+
+ Use .to_numba() to pass this object to jitted methods. This extra
+ layer avoids usage of jitclasses and having to re-implement np.unwrap in
+ numba.
+ """
+
+ def __init__(
+ self,
+ prediction_times,
+ angles,
+ sat_position,
+ nutation_precession,
+ ):
+ """Initialize orbit prediction.
+
+ In order to accelerate interpolation, the 2-pi periodicity of angles
+ is unwrapped here already (that means phase jumps greater than pi
+ are wrapped to their 2*pi complement).
+
+ Args:
+ prediction_times: Timestamps of orbit prediction.
+ angles (OrbitAngles): Orbit angles
+ sat_position (Vector3D): Satellite position
+ nutation_precession: Nutation and precession matrix.
+ """
+ self.prediction_times = prediction_times
+ self.angles = self._unwrap_angles(angles)
+ self.sat_position = sat_position
+ self.nutation_precession = nutation_precession
+
+ def _unwrap_angles(self, angles):
+ return OrbitAngles(
+ greenwich_sidereal_time=np.unwrap(angles.greenwich_sidereal_time),
+ declination_from_sat_to_sun=np.unwrap(angles.declination_from_sat_to_sun),
+ right_ascension_from_sat_to_sun=np.unwrap(
+ angles.right_ascension_from_sat_to_sun
+ ),
+ )
+
+ def to_numba(self):
+ """Convert to numba-compatible type."""
+ return _OrbitPrediction(
+ prediction_times=self.prediction_times,
+ angles=self.angles,
+ sat_position=self.sat_position,
+ nutation_precession=self.nutation_precession,
+ )
+
+
+def get_lons_lats(lines, pixels, nav_params):
+ """Compute lon/lat coordinates given VISSR image coordinates.
+
+ Args:
+ lines: VISSR image lines
+ pixels: VISSR image pixels
+ nav_params: Image navigation parameters
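+
+    Example (illustrative only; assumes ``nav_params`` was assembled from
+    the file's navigation blocks beforehand)::
+
+        lines = da.arange(2366, chunks=CHUNK_SIZE)
+        pixels = da.arange(2366, chunks=CHUNK_SIZE)
+        lons, lats = get_lons_lats(lines, pixels, nav_params)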
+ """
+ pixels_2d, lines_2d = da.meshgrid(pixels, lines)
+ lons, lats = da.map_blocks(
+ _get_lons_lats_numba,
+ lines_2d,
+ pixels_2d,
+ nav_params=_make_nav_params_numba_compatible(nav_params),
+ **_get_map_blocks_kwargs(pixels_2d.chunks)
+ )
+ return lons, lats
+
+
+def _make_nav_params_numba_compatible(nav_params):
+ predicted = PredictedNavigationParameters(
+ attitude=nav_params.predicted.attitude.to_numba(),
+ orbit=nav_params.predicted.orbit.to_numba()
+ )
+ return ImageNavigationParameters(nav_params.static, predicted)
+
+
+def _get_map_blocks_kwargs(chunks):
+    # Get keyword arguments for da.map_blocks, so that it can be used
+    # with a function that returns two values stacked along a new axis.
+ return {
+ "new_axis": 0,
+ "chunks": (2,) + chunks,
+ "dtype": np.float32,
+ }
+
+
+@numba.njit
+def _get_lons_lats_numba(lines_2d, pixels_2d, nav_params):
+ shape = lines_2d.shape
+ lons = np.zeros(shape, dtype=np.float32)
+ lats = np.zeros(shape, dtype=np.float32)
+ for i in range(shape[0]):
+ for j in range(shape[1]):
+ pixel = Pixel(lines_2d[i, j], pixels_2d[i, j])
+ nav_params_pix = _get_pixel_navigation_parameters(
+ pixel, nav_params
+ )
+ lon, lat = get_lon_lat(pixel, nav_params_pix)
+ lons[i, j] = lon
+ lats[i, j] = lat
+ # Stack lons and lats because da.map_blocks doesn't support multiple
+ # return values.
+ return np.stack((lons, lats))
+
+
+@numba.njit
+def _get_pixel_navigation_parameters(point, im_nav_params):
+ obs_time = get_observation_time(point, im_nav_params.static.scan_params)
+ attitude, orbit = interpolate_navigation_prediction(
+ attitude_prediction=im_nav_params.predicted.attitude,
+ orbit_prediction=im_nav_params.predicted.orbit,
+ observation_time=obs_time
+ )
+ return PixelNavigationParameters(
+ attitude=attitude,
+ orbit=orbit,
+ proj_params=im_nav_params.static.proj_params
+ )
+
+
+@numba.njit
+def get_observation_time(point, scan_params):
+ """Calculate observation time of a VISSR pixel."""
+ relative_time = _get_relative_observation_time(point, scan_params)
+ return scan_params.start_time_of_scan + relative_time
+
+
+@numba.njit
+def _get_relative_observation_time(point, scan_params):
+ line, pixel = point
+ pixel = pixel + 1
+ line = line + 1
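+    # Note: 1440 is the number of minutes per day. Assuming the spinning
+    # rate is given in revolutions per minute, spinning_freq is in
+    # revolutions per day and the returned relative time is in days,
+    # consistent with the MJD timestamps used elsewhere in this module.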
+ spinning_freq = 1440 * scan_params.spinning_rate
+ line_step = np.floor((line - 1) / scan_params.num_sensors)
+ pixel_step = (scan_params.sampling_angle * pixel) / (2 * np.pi)
+ return (line_step + pixel_step) / spinning_freq
+
+
+@numba.njit
+def interpolate_navigation_prediction(
+ attitude_prediction, orbit_prediction, observation_time
+):
+ """Interpolate predicted navigation parameters."""
+ attitude = interpolate_attitude_prediction(attitude_prediction, observation_time)
+ orbit = interpolate_orbit_prediction(orbit_prediction, observation_time)
+ return attitude, orbit
+
+
+@numba.njit
+def get_lon_lat(pixel, nav_params):
+ """Get longitude and latitude coordinates for a given image pixel.
+
+ Args:
+ pixel (Pixel): Point in image coordinates.
+ nav_params (PixelNavigationParameters): Navigation parameters for a
+ single pixel.
+ Returns:
+ Longitude and latitude in degrees.
+ """
+ scan_angles = transform_image_coords_to_scanning_angles(
+ pixel,
+ nav_params.proj_params.image_offset,
+ nav_params.proj_params.scanning_angles
+ )
+ view_vector_sat = transform_scanning_angles_to_satellite_coords(
+ scan_angles,
+ nav_params.proj_params.scanning_angles.misalignment
+ )
+ view_vector_earth_fixed = transform_satellite_to_earth_fixed_coords(
+ view_vector_sat,
+ nav_params.orbit,
+ nav_params.attitude
+ )
+ point_on_earth = intersect_with_earth(
+ view_vector_earth_fixed,
+ nav_params.orbit.sat_position,
+ nav_params.proj_params.earth_ellipsoid
+ )
+ lon, lat = transform_earth_fixed_to_geodetic_coords(
+ point_on_earth, nav_params.proj_params.earth_ellipsoid.flattening
+ )
+ return lon, lat
+
+
+@numba.njit
+def transform_image_coords_to_scanning_angles(point, image_offset, scanning_angles):
+ """Transform image coordinates to scanning angles.
+
+ Args:
+ point (Pixel): Point in image coordinates.
+ image_offset (ImageOffset): Image offset.
+ scanning_angles (ScanningAngles): Scanning angles.
+ Returns:
+ Scanning angles (x, y) at the pixel center (rad).
+ """
+ line_offset = image_offset.line_offset
+ pixel_offset = image_offset.pixel_offset
+ stepping_angle = scanning_angles.stepping_angle
+ sampling_angle = scanning_angles.sampling_angle
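+    # The +1 appears to convert from 0-based image coordinates to the
+    # 1-based line/pixel numbering that the offsets refer to.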
+ x = sampling_angle * (point.pixel + 1 - pixel_offset)
+ y = stepping_angle * (point.line + 1 - line_offset)
+ return Vector2D(x, y)
+
+
+@numba.njit
+def transform_scanning_angles_to_satellite_coords(angles, misalignment):
+ """Transform scanning angles to satellite angular momentum coordinates.
+
+ Args:
+ angles (Vector2D): Scanning angles in radians.
+ misalignment: Misalignment matrix (3x3)
+
+ Returns:
+ View vector (Vector3D) in satellite angular momentum coordinates.
+ """
+ x, y = angles.x, angles.y
+ sin_x = np.sin(x)
+ cos_x = np.cos(x)
+ view = Vector3D(np.cos(y), 0.0, np.sin(y))
+
+ # Correct for misalignment
+ view = matrix_vector(misalignment, view)
+
+ # Rotate around z-axis
+ return Vector3D(
+ cos_x * view.x - sin_x * view.y,
+ sin_x * view.x + cos_x * view.y,
+ view.z
+ )
+
+
+@numba.njit
+def transform_satellite_to_earth_fixed_coords(
+ point,
+ orbit,
+ attitude
+):
+ """Transform from earth-fixed to satellite angular momentum coordinates.
+
+ Args:
+ point (Vector3D): Point in satellite angular momentum coordinates.
+ orbit (Orbit): Orbital parameters
+ attitude (Attitude): Attitude parameters
+ Returns:
+ Point (Vector3D) in earth-fixed coordinates.
+ """
+ unit_vector_z = _get_satellite_unit_vector_z(attitude, orbit)
+ unit_vector_x = _get_satellite_unit_vector_x(unit_vector_z, attitude, orbit)
+ unit_vector_y = _get_satellite_unit_vector_y(unit_vector_x, unit_vector_z)
+ return _get_earth_fixed_coords(
+ point,
+ unit_vector_x,
+ unit_vector_y,
+ unit_vector_z
+ )
+
+
+@numba.njit
+def _get_satellite_unit_vector_z(attitude, orbit):
+ v1950 = _get_satellite_z_axis_1950(
+ attitude.angle_between_sat_spin_and_z_axis,
+ attitude.angle_between_sat_spin_and_yz_plane
+ )
+ vcorr = _correct_nutation_precession(
+ v1950,
+ orbit.nutation_precession
+ )
+ return _rotate_to_greenwich(
+ vcorr,
+ orbit.angles.greenwich_sidereal_time
+ )
+
+
+@numba.njit
+def _get_satellite_z_axis_1950(
+ angle_between_sat_spin_and_z_axis,
+ angle_between_sat_spin_and_yz_plane
+):
+ """Get satellite z-axis (spin) in mean of 1950 coordinates."""
+ alpha = angle_between_sat_spin_and_z_axis
+ delta = angle_between_sat_spin_and_yz_plane
+ cos_delta = np.cos(delta)
+ return Vector3D(
+ x=np.sin(delta),
+ y=-cos_delta * np.sin(alpha),
+ z=cos_delta * np.cos(alpha)
+ )
+
+
+@numba.njit
+def _correct_nutation_precession(vector, nutation_precession):
+ return matrix_vector(nutation_precession, vector)
+
+
+@numba.njit
+def _rotate_to_greenwich(vector, greenwich_sidereal_time):
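+    # Rotating about the z-axis by the Greenwich sidereal time converts
+    # the vector from inertial to earth-fixed coordinates.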
+ cos_sid = np.cos(greenwich_sidereal_time)
+ sin_sid = np.sin(greenwich_sidereal_time)
+ rotated = Vector3D(
+ x=cos_sid * vector.x + sin_sid * vector.y,
+ y=-sin_sid * vector.x + cos_sid * vector.y,
+ z=vector.z
+ )
+ return normalize_vector(rotated)
+
+
+@numba.njit
+def _get_satellite_unit_vector_x(unit_vector_z, attitude, orbit):
+ sat_sun_vec = _get_vector_from_satellite_to_sun(
+ orbit.angles.declination_from_sat_to_sun,
+ orbit.angles.right_ascension_from_sat_to_sun
+ )
+ return _get_unit_vector_x(
+ sat_sun_vec,
+ unit_vector_z,
+ attitude.angle_between_earth_and_sun
+ )
+
+
+@numba.njit
+def _get_vector_from_satellite_to_sun(
+ declination_from_sat_to_sun,
+ right_ascension_from_sat_to_sun
+):
+ declination = declination_from_sat_to_sun
+ right_ascension = right_ascension_from_sat_to_sun
+ cos_declination = np.cos(declination)
+ return Vector3D(
+ x=cos_declination * np.cos(right_ascension),
+ y=cos_declination * np.sin(right_ascension),
+ z=np.sin(declination)
+ )
+
+
+@numba.njit
+def _get_unit_vector_x(
+ sat_sun_vec,
+ unit_vector_z,
+ angle_between_earth_and_sun
+
+):
+ beta = angle_between_earth_and_sun
+ sin_beta = np.sin(beta)
+ cos_beta = np.cos(beta)
+ cross1 = _get_uz_cross_satsun(unit_vector_z, sat_sun_vec)
+ cross2 = cross_product(cross1, unit_vector_z)
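+    # Both cross1 and cross2 lie in the plane perpendicular to the spin
+    # axis; rotating cross2 towards cross1 by the earth-sun angle beta
+    # yields the satellite x-axis.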
+ unit_vector_x = Vector3D(
+ x=sin_beta * cross1.x + cos_beta * cross2.x,
+ y=sin_beta * cross1.y + cos_beta * cross2.y,
+ z=sin_beta * cross1.z + cos_beta * cross2.z
+ )
+ return normalize_vector(unit_vector_x)
+
+
+@numba.njit
+def _get_uz_cross_satsun(unit_vector_z, sat_sun_vec):
+ res = cross_product(unit_vector_z, sat_sun_vec)
+ return normalize_vector(res)
+
+
+@numba.njit
+def _get_satellite_unit_vector_y(unit_vector_x, unit_vector_z):
+ res = cross_product(unit_vector_z, unit_vector_x)
+ return normalize_vector(res)
+
+
+@numba.njit
+def _get_earth_fixed_coords(point, unit_vector_x, unit_vector_y, unit_vector_z):
+ ux, uy, uz = unit_vector_x, unit_vector_y, unit_vector_z
+ # Multiply with matrix of satellite unit vectors [ux, uy, uz]
+ return Vector3D(
+ x=ux.x * point.x + uy.x * point.y + uz.x * point.z,
+ y=ux.y * point.x + uy.y * point.y + uz.y * point.z,
+ z=ux.z * point.x + uy.z * point.y + uz.z * point.z
+ )
+
+
+@numba.njit
+def intersect_with_earth(view_vector, sat_pos, ellipsoid):
+ """Intersect instrument viewing vector with the earth's surface.
+
+ Reference: Appendix E, section 2.11 in the GMS user guide.
+
+ Args:
+ view_vector (Vector3D): Instrument viewing vector in earth-fixed
+ coordinates.
+ sat_pos (Vector3D): Satellite position in earth-fixed coordinates.
+ ellipsoid (EarthEllipsoid): Earth ellipsoid.
+ Returns:
+ Intersection (Vector3D) with the earth's surface.
+ """
+ distance = _get_distance_to_intersection(view_vector, sat_pos, ellipsoid)
+ return Vector3D(
+ sat_pos.x + distance * view_vector.x,
+ sat_pos.y + distance * view_vector.y,
+ sat_pos.z + distance * view_vector.z
+ )
+
+
+@numba.njit
+def _get_distance_to_intersection(view_vector, sat_pos, ellipsoid):
+ """Get distance to intersection with the earth.
+
+ If the instrument is pointing towards the earth, there will be two
+ intersections with the surface. Choose the one on the instrument-facing
+ side of the earth.
+ """
+ d1, d2 = _get_distances_to_intersections(view_vector, sat_pos, ellipsoid)
+ return min(d1, d2)
+
+
+@numba.njit
+def _get_distances_to_intersections(view_vector, sat_pos, ellipsoid):
+ """Get distances to intersections with the earth's surface.
+
+ Returns:
+ Distances to two intersections with the surface.
+ """
+ a, b, c = _get_abc_helper(view_vector, sat_pos, ellipsoid)
+ tmp = np.sqrt((b**2 - a * c))
+ dist_1 = (-b + tmp) / a
+ dist_2 = (-b - tmp) / a
+ return dist_1, dist_2
+
+
+@numba.njit
+def _get_abc_helper(view_vector, sat_pos, ellipsoid):
+ """Get a,b,c helper variables.
+
+ Reference: Appendix E, Equation (26) in the GMS user guide.
+ """
+ flat2 = (1 - ellipsoid.flattening) ** 2
+ ux, uy, uz = view_vector.x, view_vector.y, view_vector.z
+ x, y, z = sat_pos.x, sat_pos.y, sat_pos.z
+ a = flat2 * (ux ** 2 + uy ** 2) + uz ** 2
+ b = flat2 * (x * ux + y * uy) + z * uz
+ c = flat2 * (x ** 2 + y ** 2 - ellipsoid.equatorial_radius ** 2) + z ** 2
+ return a, b, c
+
+
+@numba.njit
+def transform_earth_fixed_to_geodetic_coords(point, earth_flattening):
+ """Transform from earth-fixed to geodetic coordinates.
+
+ Args:
+ point (Vector3D): Point in earth-fixed coordinates.
+ earth_flattening: Flattening of the earth.
+
+ Returns:
+ Geodetic longitude and latitude (degrees).
+ """
+ x, y, z = point.x, point.y, point.z
+ f = earth_flattening
+ lon = np.arctan2(y, x)
+ lat = np.arctan2(z, ((1 - f) ** 2 * np.sqrt(x**2 + y**2)))
+ return np.rad2deg(lon), np.rad2deg(lat)
+
+
+@numba.njit
+def interpolate_orbit_prediction(orbit_prediction, observation_time):
+ """Interpolate orbit prediction at the given observation time."""
+ angles = _interpolate_orbit_angles(observation_time, orbit_prediction)
+ sat_position = _interpolate_sat_position(observation_time, orbit_prediction)
+ nutation_precession = interpolate_nearest(
+ observation_time,
+ orbit_prediction.prediction_times,
+ orbit_prediction.nutation_precession,
+ )
+ return Orbit(
+ angles=angles,
+ sat_position=sat_position,
+ nutation_precession=nutation_precession,
+ )
+
+
+@numba.njit
+def _interpolate_orbit_angles(observation_time, orbit_prediction):
+ sidereal_time = interpolate_angles(
+ observation_time,
+ orbit_prediction.prediction_times,
+ orbit_prediction.angles.greenwich_sidereal_time,
+ )
+ declination = interpolate_angles(
+ observation_time,
+ orbit_prediction.prediction_times,
+ orbit_prediction.angles.declination_from_sat_to_sun,
+ )
+ right_ascension = interpolate_angles(
+ observation_time,
+ orbit_prediction.prediction_times,
+ orbit_prediction.angles.right_ascension_from_sat_to_sun,
+ )
+ return OrbitAngles(
+ greenwich_sidereal_time=sidereal_time,
+ declination_from_sat_to_sun=declination,
+ right_ascension_from_sat_to_sun=right_ascension,
+ )
+
+
+@numba.njit
+def _interpolate_sat_position(observation_time, orbit_prediction):
+ x = interpolate_continuous(
+ observation_time,
+ orbit_prediction.prediction_times,
+ orbit_prediction.sat_position.x,
+ )
+ y = interpolate_continuous(
+ observation_time,
+ orbit_prediction.prediction_times,
+ orbit_prediction.sat_position.y,
+ )
+ z = interpolate_continuous(
+ observation_time,
+ orbit_prediction.prediction_times,
+ orbit_prediction.sat_position.z,
+ )
+ return Vector3D(x, y, z)
+
+
+@numba.njit
+def interpolate_attitude_prediction(attitude_prediction, observation_time):
+ """Interpolate attitude prediction at given observation time."""
+ angle_between_earth_and_sun = interpolate_angles(
+ observation_time,
+ attitude_prediction.prediction_times,
+ attitude_prediction.attitude.angle_between_earth_and_sun,
+ )
+ angle_between_sat_spin_and_z_axis = interpolate_angles(
+ observation_time,
+ attitude_prediction.prediction_times,
+ attitude_prediction.attitude.angle_between_sat_spin_and_z_axis,
+ )
+ angle_between_sat_spin_and_yz_plane = interpolate_angles(
+ observation_time,
+ attitude_prediction.prediction_times,
+ attitude_prediction.attitude.angle_between_sat_spin_and_yz_plane,
+ )
+ return Attitude(
+ angle_between_earth_and_sun,
+ angle_between_sat_spin_and_z_axis,
+ angle_between_sat_spin_and_yz_plane,
+ )
+
+
+@numba.njit
+def interpolate_continuous(x, x_sample, y_sample):
+ """Linear interpolation of continuous quantities.
+
+ Numpy equivalent would be np.interp(..., left=np.nan, right=np.nan), but
+ numba currently doesn't support those keyword arguments.
+ """
+ try:
+ return _interpolate(x, x_sample, y_sample)
+ except Exception:
+ # Numba cannot distinguish exception types
+ return np.nan
+
+
+@numba.njit
+def _interpolate(x, x_sample, y_sample):
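+    # Standard two-point linear interpolation between the samples
+    # enclosing x.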
+ i = _find_enclosing_index(x, x_sample)
+ offset = y_sample[i]
+ x_diff = x_sample[i + 1] - x_sample[i]
+ y_diff = y_sample[i + 1] - y_sample[i]
+ slope = y_diff / x_diff
+ dist = x - x_sample[i]
+ return offset + slope * dist
+
+
+@numba.njit
+def _find_enclosing_index(x, x_sample):
+ """Find where x_sample encloses x."""
+ for i in range(len(x_sample) - 1):
+ if x_sample[i] <= x < x_sample[i + 1]:
+ return i
+ raise Exception("x not enclosed by x_sample")
+
+
+@numba.njit
+def interpolate_angles(x, x_sample, y_sample):
+ """Linear interpolation of angles.
+
+    Requires the 2-pi periodicity of the angles to be unwrapped beforehand
+    (for performance reasons). Interpolated angles are wrapped back to
+    [-pi, pi] to restore periodicity.
+ """
+ return _wrap_2pi(interpolate_continuous(x, x_sample, y_sample))
+
+
+@numba.njit
+def _wrap_2pi(values):
+ """Wrap values to interval [-pi, pi].
+
+ Source: https://stackoverflow.com/a/15927914/5703449
+ """
+ return (values + np.pi) % (2 * np.pi) - np.pi
+
+
+@numba.njit
+def interpolate_nearest(x, x_sample, y_sample):
+ """Nearest neighbour interpolation."""
+ try:
+ return _interpolate_nearest(x, x_sample, y_sample)
+ except Exception:
+ return np.nan * np.ones_like(y_sample[0])
+
+
+@numba.njit
+def _interpolate_nearest(x, x_sample, y_sample):
+ i = _find_enclosing_index(x, x_sample)
+ return y_sample[i]
+
+
+@numba.njit
+def matrix_vector(m, v):
+ """Multiply (3,3)-matrix and Vector3D."""
+ x = m[0, 0] * v.x + m[0, 1] * v.y + m[0, 2] * v.z
+ y = m[1, 0] * v.x + m[1, 1] * v.y + m[1, 2] * v.z
+ z = m[2, 0] * v.x + m[2, 1] * v.y + m[2, 2] * v.z
+ return Vector3D(x, y, z)
+
+
+@numba.njit
+def cross_product(a, b):
+ """Compute vector product a x b."""
+ return Vector3D(
+ x=a.y * b.z - a.z * b.y,
+ y=a.z * b.x - a.x * b.z,
+ z=a.x * b.y - a.y * b.x
+ )
+
+
+@numba.njit
+def normalize_vector(v):
+ """Normalize a Vector3D."""
+ norm = np.sqrt(v.x**2 + v.y**2 + v.z**2)
+ return Vector3D(
+ v.x / norm,
+ v.y / norm,
+ v.z / norm
+ )
diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py
index 89d54ab3d0..cdad865f0c 100644
--- a/satpy/readers/seviri_l1b_native.py
+++ b/satpy/readers/seviri_l1b_native.py
@@ -370,7 +370,9 @@ def _read_header(self):
self.mda['hrv_number_of_lines'] = int(sec15hd["NumberLinesHRV"]['Value'])
self.mda['hrv_number_of_columns'] = cols_hrv
- if self.header['15_MAIN_PRODUCT_HEADER']['QQOV']['Value'] == 'NOK':
+ if '15_MAIN_PRODUCT_HEADER' not in self.header:
+ logger.info("Quality flag check was not possible due to missing 15_MAIN_PRODUCT_HEADER.")
+ elif self.header['15_MAIN_PRODUCT_HEADER']['QQOV']['Value'] == 'NOK':
warnings.warn(
"The quality flag for this file indicates not OK. "
"Use this data with caution!",
diff --git a/satpy/scene.py b/satpy/scene.py
index d43c9d80d2..e3e71811e9 100644
--- a/satpy/scene.py
+++ b/satpy/scene.py
@@ -41,6 +41,25 @@
LOG = logging.getLogger(__name__)
+def _get_area_resolution(area):
+ """Attempt to retrieve resolution from AreaDefinition."""
+ try:
+ resolution = max(area.pixel_size_x, area.pixel_size_y)
+ except AttributeError:
+ resolution = max(area.lats.attrs["resolution"], area.lons.attrs["resolution"])
+ return resolution
+
+
+def _aggregate_data_array(data_array, func, **coarsen_kwargs):
+ """Aggregate xr.DataArray."""
+ res = data_array.coarsen(**coarsen_kwargs)
+ if callable(func):
+ out = res.reduce(func)
+ else:
+ out = getattr(res, func)()
+ return out
+
+
class DelayedGeneration(KeyError):
"""Mark that a dataset can't be generated without further modification."""
@@ -755,10 +774,10 @@ def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean',
Args:
dataset_ids (iterable): DataIDs to include in the returned
`Scene`. Defaults to all datasets.
- func (string): Function to apply on each aggregation window. One of
+ func (string, callable): Function to apply on each aggregation window. One of
'mean', 'sum', 'min', 'max', 'median', 'argmin',
- 'argmax', 'prod', 'std', 'var'.
- 'mean' is the default.
+ 'argmax', 'prod', 'std', 'var' strings or a custom
+ function. 'mean' is the default.
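+                For example (illustrative), a custom function can be
+                passed as ``scn.aggregate(func=np.nanmean, x=2, y=2)``.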
boundary: See :meth:`xarray.DataArray.coarsen`, 'trim' by default.
side: See :meth:`xarray.DataArray.coarsen`, 'left' by default.
dim_kwargs: the size of the windows to aggregate.
@@ -783,18 +802,16 @@ def aggregate(self, dataset_ids=None, boundary='trim', side='left', func='mean',
continue
target_area = src_area.aggregate(boundary=boundary, **dim_kwargs)
- try:
- resolution = max(target_area.pixel_size_x, target_area.pixel_size_y)
- except AttributeError:
- resolution = max(target_area.lats.resolution, target_area.lons.resolution)
+ resolution = _get_area_resolution(target_area)
for ds_id in ds_ids:
- res = self[ds_id].coarsen(boundary=boundary, side=side, **dim_kwargs)
-
- new_scn._datasets[ds_id] = getattr(res, func)()
+ new_scn._datasets[ds_id] = _aggregate_data_array(self[ds_id],
+ func=func,
+ boundary=boundary,
+ side=side,
+ **dim_kwargs)
new_scn._datasets[ds_id].attrs = self[ds_id].attrs.copy()
new_scn._datasets[ds_id].attrs['area'] = target_area
new_scn._datasets[ds_id].attrs['resolution'] = resolution
-
return new_scn
def get(self, key, default=None):
@@ -1059,7 +1076,9 @@ def to_xarray_dataset(self, datasets=None):
Returns: :class:`xarray.Dataset`
"""
- dataarrays = self._get_dataarrays_from_identifiers(datasets)
+ from satpy._scene_converters import _get_dataarrays_from_identifiers
+
+ dataarrays = _get_dataarrays_from_identifiers(self, datasets)
if len(dataarrays) == 0:
return xr.Dataset()
@@ -1081,13 +1100,70 @@ def to_xarray_dataset(self, datasets=None):
ds.attrs = mdata
return ds
- def _get_dataarrays_from_identifiers(self, identifiers):
- if identifiers is not None:
- dataarrays = [self[ds] for ds in identifiers]
- else:
- dataarrays = [self._datasets.get(ds) for ds in self._wishlist]
- dataarrays = [ds for ds in dataarrays if ds is not None]
- return dataarrays
+ def to_xarray(self,
+ datasets=None, # DataID
+ header_attrs=None,
+ exclude_attrs=None,
+ flatten_attrs=False,
+ pretty=True,
+ include_lonlats=True,
+ epoch=None,
+ include_orig_name=True,
+ numeric_name_prefix='CHANNEL_'):
+ """Merge all xr.DataArray(s) of a satpy.Scene to a CF-compliant xarray object.
+
+ If all Scene DataArrays are on the same area, it returns an xr.Dataset.
+ If Scene DataArrays are on different areas, currently it fails, although
+ in future we might return a DataTree object, grouped by area.
+
+ Parameters
+ ----------
+ datasets (iterable):
+ List of Satpy Scene datasets to include in the output xr.Dataset.
+ Elements can be string name, a wavelength as a number, a DataID,
+ or DataQuery object.
+            If None (the default), all loaded Scene datasets are included.
+ header_attrs:
+ Global attributes of the output xr.Dataset.
+ epoch (str):
+ Reference time for encoding the time coordinates (if available).
+ Example format: "seconds since 1970-01-01 00:00:00".
+            If None, the default reference time is retrieved using "from satpy.writers.cf_writer import EPOCH".
+ flatten_attrs (bool):
+ If True, flatten dict-type attributes.
+ exclude_attrs (list):
+ List of xr.DataArray attribute names to be excluded.
+ include_lonlats (bool):
+ If True, it includes 'latitude' and 'longitude' coordinates.
+ If the 'area' attribute is a SwathDefinition, it always includes
+ latitude and longitude coordinates.
+ pretty (bool):
+ Don't modify coordinate names, if possible. Makes the file prettier,
+ but possibly less consistent.
+        include_orig_name (bool):
+ Include the original dataset name as a variable attribute in the xr.Dataset.
+ numeric_name_prefix (str):
+            Prefix to add to each variable whose name starts with a digit.
+ Use '' or None to leave this out.
+
+ Returns
+ -------
+        ds : xr.Dataset
+ A CF-compliant xr.Dataset
+
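+        Examples
+        --------
+        Illustrative usage (the dataset name is hypothetical)::
+
+            scn.load(["VIS006"])
+            ds = scn.to_xarray(datasets=["VIS006"], include_lonlats=True)
+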
+ """
+ from satpy._scene_converters import to_xarray
+
+ return to_xarray(scn=self,
+ datasets=datasets, # DataID
+ header_attrs=header_attrs,
+ exclude_attrs=exclude_attrs,
+ flatten_attrs=flatten_attrs,
+ pretty=pretty,
+ include_lonlats=include_lonlats,
+ epoch=epoch,
+ include_orig_name=include_orig_name,
+ numeric_name_prefix=numeric_name_prefix)
def images(self):
"""Generate images for all the datasets from the scene."""
@@ -1188,7 +1264,9 @@ def save_datasets(self, writer=None, filename=None, datasets=None, compute=True,
close any objects that have a "close" method.
"""
- dataarrays = self._get_dataarrays_from_identifiers(datasets)
+ from satpy._scene_converters import _get_dataarrays_from_identifiers
+
+ dataarrays = _get_dataarrays_from_identifiers(self, datasets)
if not dataarrays:
raise RuntimeError("None of the requested datasets have been "
"generated or could not be loaded. Requested "
diff --git a/satpy/tests/modifier_tests/test_filters.py b/satpy/tests/modifier_tests/test_filters.py
new file mode 100644
index 0000000000..62e732d300
--- /dev/null
+++ b/satpy/tests/modifier_tests/test_filters.py
@@ -0,0 +1,35 @@
+"""Implementation of some image filters."""
+
+import logging
+
+import dask.array as da
+import numpy as np
+import xarray as xr
+
+from satpy.modifiers.filters import Median
+
+
+def test_median(caplog):
+ """Test the median filter modifier."""
+ caplog.set_level(logging.DEBUG)
+ dims = "y", "x"
+ coordinates = dict(x=np.arange(6), y=np.arange(6))
+ attrs = dict(units="K")
+ median_filter_params = dict(size=3)
+ name = "median_filter"
+ median_filter = Median(median_filter_params, name=name)
+ array = xr.DataArray(da.arange(36).reshape((6, 6)), coords=coordinates, dims=dims, attrs=attrs)
+ res = median_filter([array])
+ filtered_array = np.array([[1, 2, 3, 4, 5, 5],
+ [6, 7, 8, 9, 10, 11],
+ [12, 13, 14, 15, 16, 17],
+ [18, 19, 20, 21, 22, 23],
+ [24, 25, 26, 27, 28, 29],
+ [30, 30, 31, 32, 33, 34]])
+ np.testing.assert_allclose(res, filtered_array)
+ assert res.dims == dims
+ assert attrs.items() <= res.attrs.items()
+ assert res.attrs["name"] == name
+ np.testing.assert_equal(res.coords["x"], coordinates["x"])
+ np.testing.assert_equal(res.coords["y"], coordinates["y"])
+ assert "Apply median filtering with parameters {'size': 3}" in caplog.text
diff --git a/satpy/tests/reader_tests/gms/__init__.py b/satpy/tests/reader_tests/gms/__init__.py
new file mode 100644
index 0000000000..d37bb755ca
--- /dev/null
+++ b/satpy/tests/reader_tests/gms/__init__.py
@@ -0,0 +1 @@
+"""Unit tests for GMS reader."""
diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_data.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_data.py
new file mode 100644
index 0000000000..5ddf13438e
--- /dev/null
+++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_data.py
@@ -0,0 +1,812 @@
+"""Real world test data for GMS-5 VISSR unit tests."""
+
+import numpy as np
+
+import satpy.readers.gms.gms5_vissr_format as fmt
+
+ATTITUDE_PREDICTION = np.array(
+ [
+ (
+ 50130.93055556,
+ (19960217, 222000),
+ 3.14911863,
+ 0.00054604,
+ 4.3324597,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.93402778,
+ (19960217, 222500),
+ 3.14911863,
+ 0.00054604,
+ 4.31064812,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.9375,
+ (19960217, 223000),
+ 3.14911863,
+ 0.00054604,
+ 4.28883633,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.94097222,
+ (19960217, 223500),
+ 3.14911863,
+ 0.00054604,
+ 4.26702432,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.94444444,
+ (19960217, 224000),
+ 3.14911863,
+ 0.00054604,
+ 4.2452121,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.94791667,
+ (19960217, 224500),
+ 3.14911863,
+ 0.00054604,
+ 4.22339966,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.95138889,
+ (19960217, 225000),
+ 3.14911863,
+ 0.00054604,
+ 4.201587,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.95486111,
+ (19960217, 225500),
+ 3.14911863,
+ 0.00054604,
+ 4.17977411,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.95833333,
+ (19960217, 230000),
+ 3.14911863,
+ 0.00054604,
+ 4.157961,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.96180556,
+ (19960217, 230500),
+ 3.14911863,
+ 0.00054604,
+ 4.13614765,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.96527778,
+ (19960217, 231000),
+ 3.14911863,
+ 0.00054604,
+ 4.11433408,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.96875,
+ (19960217, 231500),
+ 3.14911863,
+ 0.00054604,
+ 4.09252027,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.97222222,
+ (19960217, 232000),
+ 3.14911863,
+ 0.00054604,
+ 4.07070622,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.97569444,
+ (19960217, 232500),
+ 3.14911863,
+ 0.00054604,
+ 4.04889193,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.97916667,
+ (19960217, 233000),
+ 3.14911863,
+ 0.00054604,
+ 4.02707741,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.98263889,
+ (19960217, 233500),
+ 3.14911863,
+ 0.00054604,
+ 4.00526265,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.98611111,
+ (19960217, 234000),
+ 3.14911863,
+ 0.00054604,
+ 3.98344765,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.98958333,
+ (19960217, 234500),
+ 3.14911863,
+ 0.00054604,
+ 3.96163241,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.99305556,
+ (19960217, 235000),
+ 3.14911863,
+ 0.00054604,
+ 3.93981692,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50130.99652778,
+ (19960217, 235500),
+ 3.14911863,
+ 0.00054604,
+ 3.9180012,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50131.0,
+ (19960218, 0),
+ 3.14911863,
+ 0.00054604,
+ 3.89618523,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50131.00347222,
+ (19960218, 500),
+ 3.14911863,
+ 0.00054604,
+ 3.87436903,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50131.00694444,
+ (19960218, 1000),
+ 3.14911863,
+ 0.00054604,
+ 3.85255258,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50131.01041667,
+ (19960218, 1500),
+ 3.14911863,
+ 0.00054604,
+ 3.8307359,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50131.01388889,
+ (19960218, 2000),
+ 3.14911863,
+ 0.00054604,
+ 3.80891898,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50131.01736111,
+ (19960218, 2500),
+ 3.14911863,
+ 0.00054604,
+ 3.78710182,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50131.02083333,
+ (19960218, 3000),
+ 3.14911863,
+ 0.00054604,
+ 3.76528442,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50131.02430556,
+ (19960218, 3500),
+ 3.14911863,
+ 0.00054604,
+ 3.74346679,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50131.02777778,
+ (19960218, 4000),
+ 3.14911863,
+ 0.00054604,
+ 3.72164893,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50131.03125,
+ (19960218, 4500),
+ 3.14911863,
+ 0.00054604,
+ 3.69983084,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50131.03472222,
+ (19960218, 5000),
+ 3.14911863,
+ 0.00054604,
+ 3.67801252,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50131.03819444,
+ (19960218, 5500),
+ 3.14911863,
+ 0.00054604,
+ 3.65619398,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ (
+ 50131.04166667,
+ (19960218, 10000),
+ 3.14911863,
+ 0.00054604,
+ 3.63437521,
+ 99.21774527,
+ 0.97415452,
+ -1.56984055,
+ 0.0,
+ 0,
+ 0,
+ ),
+ ],
+ dtype=fmt.ATTITUDE_PREDICTION_DATA,
+)
+
+ORBIT_PREDICTION_1 = np.array(
+ [
+ (
+ 50130.96180556,
+ (960217, 230500),
+ [2247604.14185506, -42110997.39399951, -276688.79765022],
+ [3069.77904265, 164.12584895, 3.65437628],
+ [-32392525.09983424, 27002204.93121811, -263873.25702763],
+ [0.81859376, 0.6760037, 17.44588753],
+ 133.46391815,
+ (330.12326803, -12.19424863),
+ (197.27884747, -11.96904141),
+ [
+ [9.99936382e-01, 1.03449318e-02, 4.49611916e-03],
+ [-1.03447475e-02, 9.99946490e-01, -6.42483646e-05],
+ [-4.49654321e-03, 1.77330598e-05, 9.99989890e-01],
+ ],
+ [2.46885475e08, -2.07840219e08, -7.66028692e07],
+ (-0.35887085, 140.18562594, 35793706.31768975),
+ 0,
+ 0,
+ ),
+ (
+ 50130.96527778,
+ (960217, 231000),
+ [3167927.33749398, -42051692.51095297, -275526.52514815],
+ [3065.46435995, 231.22434208, 4.09379482],
+ [-32392279.4626506, 27002405.27592725, -258576.96255205],
+ [0.81939962, 0.66017389, 17.86159393],
+ 134.71734048,
+ (330.12643276, -12.19310271),
+ (196.02858456, -11.9678881),
+ [
+ [9.99936382e-01, 1.03449336e-02, 4.49611993e-03],
+ [-1.03447493e-02, 9.99946490e-01, -6.42473793e-05],
+ [-4.49654398e-03, 1.77320586e-05, 9.99989890e-01],
+ ],
+ [2.46204142e08, -2.07689897e08, -7.65268207e07],
+ (-0.35166851, 140.18520316, 35793613.0815237),
+ 0,
+ 0,
+ ),
+ (
+ 50130.96875,
+ (960217, 231500),
+ [4086736.12968183, -41972273.80964861, -274232.7185828],
+ [3059.68341675, 298.21262775, 4.53123515],
+ [-32392033.65156128, 27002600.83510851, -253157.23498394],
+ [0.81975174, 0.6441, 18.26873686],
+ 135.97076281,
+ (330.12959087, -12.19195587),
+ (194.77831505, -11.96673388),
+ [
+ [9.99936382e-01, 1.03449353e-02, 4.49612071e-03],
+ [-1.03447510e-02, 9.99946490e-01, -6.42463940e-05],
+ [-4.49654474e-03, 1.77310575e-05, 9.99989890e-01],
+ ],
+ [2.45524133e08, -2.07559497e08, -7.64508451e07],
+ (-0.3442983, 140.18478523, 35793516.57370046),
+ 0,
+ 0,
+ ),
+ (
+ 50130.97222222,
+ (960217, 232000),
+ [5003591.03339227, -41872779.15809826, -272808.0027587],
+ [3052.43895532, 365.05867777, 4.9664885],
+ [-32391787.80234722, 27002791.53735474, -247616.67261456],
+ [0.81965461, 0.62779672, 18.66712192],
+ 137.22418515,
+ (330.13274246, -12.19080808),
+ (193.52803902, -11.9655787),
+ [
+ [9.99936382e-01, 1.03449371e-02, 4.49612148e-03],
+ [-1.03447528e-02, 9.99946490e-01, -6.42454089e-05],
+ [-4.49654551e-03, 1.77300565e-05, 9.99989890e-01],
+ ],
+ [2.44845888e08, -2.07448982e08, -7.63749418e07],
+ (-0.33676374, 140.18437233, 35793416.91561355),
+ 0,
+ 0,
+ ),
+ (
+ 50130.97569444,
+ (960217, 232500),
+ [5918053.49286455, -41753256.02295399, -271253.06495935],
+ [3043.73441705, 431.73053079, 5.39934712],
+ [-32391542.0492856, 27002977.3157848, -241957.93142027],
+ [0.81911313, 0.61127876, 19.05655891],
+ 138.47760748,
+ (330.13588763, -12.1896593),
+ (192.27775657, -11.96442254),
+ [
+ [9.99936382e-01, 1.03449388e-02, 4.49612225e-03],
+ [-1.03447545e-02, 9.99946490e-01, -6.42444238e-05],
+ [-4.49654627e-03, 1.77290557e-05, 9.99989890e-01],
+ ],
+ [2.44169846e08, -2.07358303e08, -7.62991102e07],
+ (-0.32906846, 140.18396465, 35793314.23041636),
+ 0,
+ 0,
+ ),
+ (
+ 50130.97916667,
+ (960217, 233000),
+ [6829686.08751574, -41613761.44760592, -269568.65462124],
+ [3033.5739409, 498.19630731, 5.82960444],
+ [-32391296.52466749, 27003158.10847847, -236183.72381214],
+ [0.81813262, 0.59456087, 19.43686189],
+ 139.73102981,
+ (330.1390265, -12.18850951),
+ (191.02746783, -11.96326537),
+ [
+ [9.99936382e-01, 1.03449406e-02, 4.49612302e-03],
+ [-1.03447563e-02, 9.99946490e-01, -6.42434389e-05],
+ [-4.49654703e-03, 1.77280550e-05, 9.99989890e-01],
+ ],
+ [2.43496443e08, -2.07287406e08, -7.62233495e07],
+ (-0.32121612, 140.18356238, 35793208.6428103),
+ 0,
+ 0,
+ ),
+ (
+ 50130.98263889,
+ (960217, 233500),
+ [7738052.74476409, -41454362.02480648, -267755.58296603],
+ [3021.96236148, 564.42422513, 6.25705512],
+ [-32391051.35918404, 27003333.85786499, -230296.81731314],
+ [0.81671881, 0.57765777, 19.80784932],
+ 140.98445214,
+ (330.14215916, -12.18735869),
+ (189.77717289, -11.96210717),
+ [
+ [9.99936381e-01, 1.03449423e-02, 4.49612379e-03],
+ [-1.03447580e-02, 9.99946489e-01, -6.42424541e-05],
+ [-4.49654778e-03, 1.77270545e-05, 9.99989890e-01],
+ ],
+ [2.42826115e08, -2.07236222e08, -7.61476592e07],
+ (-0.3132105, 140.18316567, 35793100.27882991),
+ 0,
+ 0,
+ ),
+ (
+ 50130.98611111,
+ (960217, 234000),
+ [8642718.9445816, -41275133.86582235, -265814.72261683],
+ [3008.90520686, 630.38261431, 6.68149519],
+ [-32390806.68247503, 27003504.50991426, -224300.03325666],
+ [0.81487783, 0.56058415, 20.16934411],
+ 142.23787447,
+ (330.14528573, -12.18620679),
+ (188.52687186, -11.9609479),
+ [
+ [9.99936381e-01, 1.03449440e-02, 4.49612456e-03],
+ [-1.03447598e-02, 9.99946489e-01, -6.42414694e-05],
+ [-4.49654854e-03, 1.77260540e-05, 9.99989890e-01],
+ ],
+ [2.42159297e08, -2.07204676e08, -7.60720382e07],
+ (-0.30505542, 140.18277471, 35792989.2656269),
+ 0,
+ 0,
+ ),
+ (
+ 50130.98958333,
+ (960217, 234500),
+ [9543251.93095296, -41076162.56379041, -263747.00717057],
+ [2994.40869593, 696.03993248, 7.10272213],
+ [-32390562.62077149, 27003670.01680953, -218196.24541058],
+ [0.81261619, 0.54335463, 20.52117372],
+ 143.4912968,
+ (330.14840632, -12.18505381),
+ (187.27656486, -11.95978754),
+ [
+ [9.99936381e-01, 1.03449458e-02, 4.49612532e-03],
+ [-1.03447615e-02, 9.99946489e-01, -6.42404848e-05],
+ [-4.49654930e-03, 1.77250538e-05, 9.99989890e-01],
+ ],
+ [2.41496422e08, -2.07192684e08, -7.59964859e07],
+ (-0.29675479, 140.18238966, 35792875.73125207),
+ 0,
+ 0,
+ ),
+ ],
+ dtype=fmt.ORBIT_PREDICTION_DATA,
+)
+
+ORBIT_PREDICTION_2 = np.array(
+ [
+ (
+ 50130.99305556,
+ (960217, 235000),
+ [10439220.91492008, -40857543.15396438, -261553.43075696],
+ [2978.47973561, 761.36477969, 7.52053495],
+ [-32390319.30020279, 27003830.33282405, -211988.37862591],
+ [0.80994076, 0.52598377, 20.86317023],
+ 144.74471913,
+ (330.15152105, -12.1838997),
+ (186.026252, -11.95862606),
+ [
+ [9.99936381e-01, 1.03449475e-02, 4.49612609e-03],
+ [-1.03447632e-02, 9.99946489e-01, -6.42395003e-05],
+ [-4.49655005e-03, 1.77240537e-05, 9.99989890e-01],
+ ],
+ [2.40837919e08, -2.07200148e08, -7.59210011e07],
+ (-0.28831259, 140.18201066, 35792759.80443729),
+ 0,
+ 0,
+ ),
+ (
+ 50130.99652778,
+ (960217, 235500),
+ [11330197.2840407, -40619380.06793167, -259235.04755252],
+ [2961.12591755, 826.32591367, 7.93473432],
+ [-32390076.84311398, 27003985.41857829, -205679.40741202],
+ [0.80685878, 0.50848599, 21.19517045],
+ 145.99814147,
+ (330.15463004, -12.18274445),
+ (184.77593341, -11.95746344),
+ [
+ [9.99936381e-01, 1.03449492e-02, 4.49612685e-03],
+ [-1.03447650e-02, 9.99946489e-01, -6.42385159e-05],
+ [-4.49655080e-03, 1.77230537e-05, 9.99989890e-01],
+ ],
+ [2.40184218e08, -2.07226967e08, -7.58455830e07],
+ (-0.27973286, 140.18163787, 35792641.6143761),
+ 0,
+ 0,
+ ),
+ (
+ 50131.0,
+ (960218, 0),
+ [12215754.80493221, -40361787.08463053, -256792.97127933],
+ [2942.35551459, 890.89226454, 8.34512262],
+ [-32389835.37113104, 27004135.23720251, -199272.35452792],
+ [0.8033778, 0.49087558, 21.51701595],
+ 147.2515638,
+ (330.15773341, -12.18158803),
+ (183.5256092, -11.95629965),
+ [
+ [9.99936381e-01, 1.03449510e-02, 4.49612761e-03],
+ [-1.03447667e-02, 9.99946489e-01, -6.42375317e-05],
+ [-4.49655155e-03, 1.77220539e-05, 9.99989890e-01],
+ ],
+ [2.39535744e08, -2.07273025e08, -7.57702305e07],
+ (-0.2710197, 140.18127143, 35792521.29050537),
+ 0,
+ 0,
+ ),
+ (
+ 50131.00347222,
+ (960218, 500),
+ [13095469.82708225, -40084887.27645436, -254228.37467049],
+ [2922.17747695, 955.03294974, 8.75150409],
+ [-32389595.00191828, 27004279.7580633, -192770.28953487],
+ [0.79950572, 0.47316669, 21.82855319],
+ 148.50498613,
+ (330.16083128, -12.18043041),
+ (182.27527951, -11.95513466),
+ [
+ [9.99936381e-01, 1.03449527e-02, 4.49612837e-03],
+ [-1.03447684e-02, 9.99946489e-01, -6.42365476e-05],
+ [-4.49655230e-03, 1.77210542e-05, 9.99989890e-01],
+ ],
+ [2.38892921e08, -2.07338200e08, -7.56949425e07],
+ (-0.26217728, 140.18091148, 35792398.96228714),
+ 0,
+ 0,
+ ),
+ (
+ 50131.00694444,
+ (960218, 1000),
+ [13968921.48773305, -39788812.95011112, -251542.48890031],
+ [2900.60142795, 1018.71728887, 9.15368488],
+ [-32389355.85220329, 27004418.95297137, -186176.32730922],
+ [0.79525074, 0.45537327, 22.12963356],
+ 149.75840846,
+ (330.16392379, -12.17927157),
+ (181.02494445, -11.95396845),
+ [
+ [9.99936381e-01, 1.03449544e-02, 4.49612913e-03],
+ [-1.03447701e-02, 9.99946489e-01, -6.42355636e-05],
+ [-4.49655305e-03, 1.77200547e-05, 9.99989890e-01],
+ ],
+ [2.38256170e08, -2.07422360e08, -7.56197178e07],
+ (-0.25320985, 140.18055815, 35792274.75899146),
+ 0,
+ 0,
+ ),
+ (
+ 50131.01041667,
+ (960218, 1500),
+ [14835691.90970188, -39473705.58489136, -248736.60300345],
+ [2877.63765957, 1081.9148182, 9.55147314],
+ [-32389118.03536845, 27004552.79890675, -179493.62657611],
+ [0.79062131, 0.43750908, 22.42011344],
+ 151.01183079,
+ (330.16701107, -12.17811148),
+ (179.77462147, -11.952801),
+ [
+ [9.99936381e-01, 1.03449561e-02, 4.49612989e-03],
+ [-1.03447719e-02, 9.99946489e-01, -6.42345798e-05],
+ [-4.49655380e-03, 1.77190553e-05, 9.99989890e-01],
+ ],
+ [2.37625908e08, -2.07525364e08, -7.55445552e07],
+ (-0.24412169, 140.18021156, 35792148.80948149),
+ 0,
+ 0,
+ ),
+ (
+ 50131.01388889,
+ (960218, 2000),
+ [15695366.40490882, -39139715.76420763, -245812.06324505],
+ [2853.29712752, 1144.59530548, 9.94467917],
+ [-32388881.66227116, 27004681.27687033, -172725.38836895],
+ [0.7856262, 0.41958762, 22.69985431],
+ 152.26525312,
+ (330.17009324, -12.17695013),
+ (178.52427609, -11.95163228),
+ [
+ [9.99936381e-01, 1.03449578e-02, 4.49613064e-03],
+ [-1.03447736e-02, 9.99946489e-01, -6.42335961e-05],
+ [-4.49655455e-03, 1.77180562e-05, 9.99989890e-01],
+ ],
+ [2.37002549e08, -2.07647061e08, -7.54694534e07],
+ (-0.23491716, 140.17987182, 35792021.2420001),
+ 0,
+ 0,
+ ),
+ (
+ 50131.01736111,
+ (960218, 2500),
+ [16547533.6691137, -38787003.10533711, -242770.27248672],
+ [2827.5914462, 1206.72876414, 10.33311542],
+ [-32388646.84104986, 27004804.37195345, -165874.85452439],
+ [0.78027439, 0.40162218, 22.96872279],
+ 153.51867545,
+ (330.17317044, -12.17578748),
+ (177.27392574, -11.95046228),
+ [
+ [9.99936381e-01, 1.03449595e-02, 4.49613140e-03],
+ [-1.03447753e-02, 9.99946489e-01, -6.42326125e-05],
+ [-4.49655529e-03, 1.77170571e-05, 9.99989890e-01],
+ ],
+ [2.36386506e08, -2.07787291e08, -7.53944111e07],
+ (-0.22560065, 140.17953905, 35791892.18395986),
+ 0,
+ 0,
+ ),
+ (
+ 50131.02083333,
+ (960218, 3000),
+ [17391785.98229151, -38415736.18212036, -239612.68950141],
+ [2800.53288309, 1268.28546791, 10.71659666],
+ [-32388413.67874206, 27004922.07123395, -158945.30610131],
+ [0.77457509, 0.38362576, 23.2265907],
+ 154.77209777,
+ (330.17624281, -12.17462353),
+ (176.02357057, -11.94929096),
+ [
+ [9.99936381e-01, 1.03449612e-02, 4.49613215e-03],
+ [-1.03447770e-02, 9.99946489e-01, -6.42316291e-05],
+ [-4.49655603e-03, 1.77160583e-05, 9.99989890e-01],
+ ],
+ [2.35778185e08, -2.07945887e08, -7.53194268e07],
+ (-0.21617663, 140.17921335, 35791761.76173551),
+ 0,
+ 0,
+ ),
+ ],
+ dtype=fmt.ORBIT_PREDICTION_DATA,
+)
diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py
new file mode 100644
index 0000000000..31482f1e10
--- /dev/null
+++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_l1b.py
@@ -0,0 +1,630 @@
+"""Unit tests for GMS-5 VISSR reader."""
+
+import datetime as dt
+import gzip
+
+import fsspec
+import numpy as np
+import pytest
+import xarray as xr
+from pyresample.geometry import AreaDefinition
+
+import satpy.readers.gms.gms5_vissr_format as fmt
+import satpy.readers.gms.gms5_vissr_l1b as vissr
+import satpy.readers.gms.gms5_vissr_navigation as nav
+import satpy.tests.reader_tests.gms.test_gms5_vissr_data as real_world
+from satpy.readers import FSFile
+from satpy.tests.reader_tests.utils import get_jit_methods
+from satpy.tests.utils import make_dataid
+
+
+@pytest.fixture(params=[False, True], autouse=True)
+def disable_jit(request, monkeypatch):
+ """Run tests with jit enabled and disabled.
+
+ Reason: Coverage report is only accurate with jit disabled.
+ """
+ if request.param:
+ jit_methods = get_jit_methods(vissr)
+ for name, method in jit_methods.items():
+ monkeypatch.setattr(name, method.py_func)
+
+
+class TestEarthMask:
+ """Test getting the earth mask."""
+
+ def test_get_earth_mask(self):
+ """Test getting the earth mask."""
+ first_earth_pixels = np.array([-1, 1, 0, -1])
+ last_earth_pixels = np.array([-1, 3, 2, -1])
+ edges = first_earth_pixels, last_earth_pixels
+ mask_exp = np.array(
+ [[0, 0, 0, 0],
+ [0, 1, 1, 1],
+ [1, 1, 1, 0],
+ [0, 0, 0, 0]]
+ )
+ mask = vissr.get_earth_mask(mask_exp.shape, edges)
+ np.testing.assert_equal(mask, mask_exp)
+
+
+class TestFileHandler:
+ """Test VISSR file handler."""
+
+ @pytest.fixture(autouse=True)
+ def patch_number_of_pixels_per_scanline(self, monkeypatch):
+ """Patch data types so that each scanline has two pixels."""
+ num_pixels = 2
+ IMAGE_DATA_BLOCK_IR = np.dtype(
+ [
+ ("LCW", fmt.LINE_CONTROL_WORD),
+ ("DOC", fmt.U1, (256,)),
+ ("image_data", fmt.U1, num_pixels),
+ ]
+ )
+ IMAGE_DATA_BLOCK_VIS = np.dtype(
+ [
+ ("LCW", fmt.LINE_CONTROL_WORD),
+ ("DOC", fmt.U1, (64,)),
+ ("image_data", fmt.U1, (num_pixels,)),
+ ]
+ )
+ IMAGE_DATA = {
+ fmt.VIS_CHANNEL: {
+ "offset": 6 * fmt.BLOCK_SIZE_VIS,
+ "dtype": IMAGE_DATA_BLOCK_VIS,
+ },
+ fmt.IR_CHANNEL: {
+ "offset": 18 * fmt.BLOCK_SIZE_IR,
+ "dtype": IMAGE_DATA_BLOCK_IR,
+ },
+ }
+ monkeypatch.setattr(
+ "satpy.readers.gms.gms5_vissr_format.IMAGE_DATA_BLOCK_IR", IMAGE_DATA_BLOCK_IR
+ )
+ monkeypatch.setattr(
+ "satpy.readers.gms.gms5_vissr_format.IMAGE_DATA_BLOCK_VIS", IMAGE_DATA_BLOCK_VIS
+ )
+ monkeypatch.setattr("satpy.readers.gms.gms5_vissr_format.IMAGE_DATA", IMAGE_DATA)
+
+ @pytest.fixture(
+ params=[
+ make_dataid(name="VIS", calibration="reflectance", resolution=1250),
+ make_dataid(
+ name="IR1", calibration="brightness_temperature", resolution=5000
+ ),
+ make_dataid(name="IR1", calibration="counts", resolution=5000),
+ ]
+ )
+ def dataset_id(self, request):
+ """Get dataset ID."""
+ return request.param
+
+ @pytest.fixture(params=[True, False])
+ def mask_space(self, request):
+ """Mask space pixels."""
+ return request.param
+
+ @pytest.fixture(params=[True, False])
+ def with_compression(self, request):
+ """Enable compression."""
+ return request.param
+
+ @pytest.fixture
+ def open_function(self, with_compression):
+ """Get open function for writing test files."""
+ return gzip.open if with_compression else open
+
+ @pytest.fixture
+ def vissr_file(self, dataset_id, file_contents, open_function, tmp_path):
+ """Get test VISSR file."""
+ filename = tmp_path / "vissr_file"
+ ch_type = fmt.CHANNEL_TYPES[dataset_id["name"]]
+ writer = VissrFileWriter(ch_type, open_function)
+ writer.write(filename, file_contents)
+ return filename
+
+ @pytest.fixture
+ def file_contents(self, control_block, image_parameters, image_data):
+ """Get VISSR file contents."""
+ return {
+ "control_block": control_block,
+ "image_parameters": image_parameters,
+ "image_data": image_data,
+ }
+
+ @pytest.fixture
+ def control_block(self, dataset_id):
+ """Get VISSR control block."""
+ block_size = {"IR1": 16, "VIS": 4}
+ ctrl_block = np.zeros(1, dtype=fmt.CONTROL_BLOCK)
+ ctrl_block["parameter_block_size"] = block_size[dataset_id["name"]]
+ ctrl_block["available_block_size_of_image_data"] = 2
+ return ctrl_block
+
+ @pytest.fixture
+ def image_parameters(self, mode_block, cal_params, nav_params):
+ """Get VISSR image parameters."""
+ image_params = {"mode": mode_block}
+ image_params.update(cal_params)
+ image_params.update(nav_params)
+ return image_params
+
+ @pytest.fixture
+ def nav_params(
+ self,
+ coordinate_conversion,
+ attitude_prediction,
+ orbit_prediction,
+ ):
+ """Get navigation parameters."""
+ nav_params = {}
+ nav_params.update(attitude_prediction)
+ nav_params.update(orbit_prediction)
+ nav_params.update(coordinate_conversion)
+ return nav_params
+
+ @pytest.fixture
+ def cal_params(
+ self,
+ vis_calibration,
+ ir1_calibration,
+ ir2_calibration,
+ wv_calibration,
+ ):
+ """Get calibration parameters."""
+ return {
+ "vis_calibration": vis_calibration,
+ "ir1_calibration": ir1_calibration,
+ "ir2_calibration": ir2_calibration,
+ "wv_calibration": wv_calibration,
+ }
+
+ @pytest.fixture
+ def mode_block(self):
+ """Get VISSR mode block."""
+ mode = np.zeros(1, dtype=fmt.MODE_BLOCK)
+ mode["satellite_name"] = b"GMS-5 "
+ mode["spin_rate"] = 99.21774
+ mode["observation_time_mjd"] = 50000.0
+ mode["ssp_longitude"] = 140.0
+ mode["satellite_height"] = 123456.0
+ mode["ir_frame_parameters"]["number_of_lines"] = 2
+ mode["ir_frame_parameters"]["number_of_pixels"] = 2
+ mode["vis_frame_parameters"]["number_of_lines"] = 2
+ mode["vis_frame_parameters"]["number_of_pixels"] = 2
+ return mode
+
+ @pytest.fixture
+ def coordinate_conversion(self, coord_conv, simple_coord_conv_table):
+ """Get all coordinate conversion parameters."""
+ return {
+ "coordinate_conversion": coord_conv,
+ "simple_coordinate_conversion_table": simple_coord_conv_table
+ }
+
+ @pytest.fixture
+ def coord_conv(self):
+ """Get parameters for coordinate conversions.
+
+ Adjust pixel offset so that the first column is at the image center.
+ This has the advantage that we can test with very small 2x2 images.
+ Otherwise, all pixels would be in space.
+ """
+ conv = np.zeros(1, dtype=fmt.COORDINATE_CONVERSION_PARAMETERS)
+
+ cline = conv["central_line_number_of_vissr_frame"]
+ cline["IR1"] = 1378.5
+ cline["VIS"] = 5513.0
+
+ cpix = conv["central_pixel_number_of_vissr_frame"]
+ cpix["IR1"] = 0.5 # instead of 1672.5
+ cpix["VIS"] = 0.5 # instead of 6688.5
+
+ conv['scheduled_observation_time'] = 50130.979089568464
+
+ nsensors = conv["number_of_sensor_elements"]
+ nsensors["IR1"] = 1
+ nsensors["VIS"] = 4
+
+ sampling_angle = conv["sampling_angle_along_pixel"]
+ sampling_angle["IR1"] = 9.5719995e-05
+ sampling_angle["VIS"] = 2.3929999e-05
+
+ stepping_angle = conv["stepping_angle_along_line"]
+ stepping_angle["IR1"] = 0.00014000005
+ stepping_angle["VIS"] = 3.5000005e-05
+
+ conv["matrix_of_misalignment"] = np.array(
+ [[9.9999917e-01, -5.1195198e-04, -1.2135329e-03],
+ [5.1036407e-04, 9.9999905e-01, -1.3083406e-03],
+ [1.2142011e-03, 1.3077201e-03, 9.9999845e-01]],
+ dtype=np.float32
+ )
+
+ conv["parameters"]["equatorial_radius"] = 6377397.0
+ conv["parameters"]["oblateness_of_earth"] = 0.003342773
+
+ conv["orbital_parameters"]["longitude_of_ssp"] = 141.0
+ conv["orbital_parameters"]["latitude_of_ssp"] = 1.0
+ return conv
+
+ @pytest.fixture
+ def attitude_prediction(self):
+ """Get attitude prediction."""
+ att_pred = np.zeros(1, dtype=fmt.ATTITUDE_PREDICTION)
+ att_pred["data"] = real_world.ATTITUDE_PREDICTION
+ return {"attitude_prediction": att_pred}
+
+ @pytest.fixture
+ def orbit_prediction(self, orbit_prediction_1, orbit_prediction_2):
+ """Get predictions of orbital parameters."""
+ return {
+ "orbit_prediction_1": orbit_prediction_1,
+ "orbit_prediction_2": orbit_prediction_2
+ }
+
+ @pytest.fixture
+ def orbit_prediction_1(self):
+ """Get first block of orbit prediction data."""
+ orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION)
+ orb_pred["data"] = real_world.ORBIT_PREDICTION_1
+ return orb_pred
+
+ @pytest.fixture
+ def orbit_prediction_2(self):
+ """Get second block of orbit prediction data."""
+ orb_pred = np.zeros(1, dtype=fmt.ORBIT_PREDICTION)
+ orb_pred["data"] = real_world.ORBIT_PREDICTION_2
+ return orb_pred
+
+ @pytest.fixture
+ def vis_calibration(self):
+ """Get VIS calibration block."""
+ vis_cal = np.zeros(1, dtype=fmt.VIS_CALIBRATION)
+ table = vis_cal["vis1_calibration_table"]["brightness_albedo_conversion_table"]
+ table[0, 0:4] = np.array([0, 0.25, 0.5, 1])
+ return vis_cal
+
+ @pytest.fixture
+ def ir1_calibration(self):
+ """Get IR1 calibration block."""
+ cal = np.zeros(1, dtype=fmt.IR_CALIBRATION)
+ table = cal["conversion_table_of_equivalent_black_body_temperature"]
+ table[0, 0:4] = np.array([0, 100, 200, 300])
+ return cal
+
+ @pytest.fixture
+ def ir2_calibration(self):
+ """Get IR2 calibration block."""
+ cal = np.zeros(1, dtype=fmt.IR_CALIBRATION)
+ return cal
+
+ @pytest.fixture
+ def wv_calibration(self):
+ """Get WV calibration block."""
+ cal = np.zeros(1, dtype=fmt.IR_CALIBRATION)
+ return cal
+
+ @pytest.fixture
+ def simple_coord_conv_table(self):
+ """Get simple coordinate conversion table."""
+ table = np.zeros(1, dtype=fmt.SIMPLE_COORDINATE_CONVERSION_TABLE)
+ table["satellite_height"] = 123457.0
+ return table
+
+ @pytest.fixture
+ def image_data(self, dataset_id, image_data_ir1, image_data_vis):
+ """Get VISSR image data."""
+ data = {"IR1": image_data_ir1, "VIS": image_data_vis}
+ return data[dataset_id["name"]]
+
+ @pytest.fixture
+ def image_data_ir1(self):
+ """Get IR1 image data."""
+ image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_IR)
+ image_data["LCW"]["line_number"] = [686, 2089]
+ image_data["LCW"]["scan_time"] = [50000, 50000]
+ image_data["LCW"]["west_side_earth_edge"] = [0, 0]
+ image_data["LCW"]["east_side_earth_edge"] = [1, 1]
+ image_data["image_data"] = [[0, 1], [2, 3]]
+ return image_data
+
+ @pytest.fixture
+ def image_data_vis(self):
+ """Get VIS image data."""
+ image_data = np.zeros(2, fmt.IMAGE_DATA_BLOCK_VIS)
+ image_data["LCW"]["line_number"] = [2744, 8356]
+ image_data["LCW"]["scan_time"] = [50000, 50000]
+ image_data["LCW"]["west_side_earth_edge"] = [-1, 0]
+ image_data["LCW"]["east_side_earth_edge"] = [-1, 1]
+ image_data["image_data"] = [[0, 1], [2, 3]]
+ return image_data
+
+ @pytest.fixture
+ def vissr_file_like(self, vissr_file, with_compression):
+ """Get file-like object for VISSR test file."""
+ if with_compression:
+ open_file = fsspec.open(vissr_file, compression="gzip")
+ return FSFile(open_file)
+ return vissr_file
+
+ @pytest.fixture
+ def file_handler(self, vissr_file_like, mask_space):
+ """Get file handler to be tested."""
+ return vissr.GMS5VISSRFileHandler(
+ vissr_file_like, {}, {}, mask_space=mask_space
+ )
+
+ @pytest.fixture
+ def vis_refl_exp(self, mask_space, lons_lats_exp):
+ """Get expected VIS reflectance."""
+ lons, lats = lons_lats_exp
+ if mask_space:
+ data = [[np.nan, np.nan], [50, 100]]
+ else:
+ data = [[0, 25], [50, 100]]
+ return xr.DataArray(
+ data,
+ dims=("y", "x"),
+ coords={
+ "lon": lons,
+ "lat": lats,
+ "acq_time": (
+ "y",
+ [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)],
+ ),
+ "line_number": ("y", [2744, 8356]),
+ },
+ )
+
+ @pytest.fixture
+ def ir1_counts_exp(self, lons_lats_exp):
+ """Get expected IR1 counts."""
+ lons, lats = lons_lats_exp
+ return xr.DataArray(
+ [[0, 1], [2, 3]],
+ dims=("y", "x"),
+ coords={
+ "lon": lons,
+ "lat": lats,
+ "acq_time": (
+ "y",
+ [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)],
+ ),
+ "line_number": ("y", [686, 2089]),
+ },
+ )
+
+ @pytest.fixture
+ def ir1_bt_exp(self, lons_lats_exp):
+ """Get expected IR1 brightness temperature."""
+ lons, lats = lons_lats_exp
+ return xr.DataArray(
+ [[0, 100], [200, 300]],
+ dims=("y", "x"),
+ coords={
+ "lon": lons,
+ "lat": lats,
+ "acq_time": (
+ "y",
+ [dt.datetime(1995, 10, 10), dt.datetime(1995, 10, 10)],
+ ),
+ "line_number": ("y", [686, 2089]),
+ },
+ )
+
+ @pytest.fixture
+ def lons_lats_exp(self, dataset_id):
+ """Get expected lon/lat coordinates.
+
+ Computed with JMA's Msial library for 2 pixels near the central column
+ (6688.5/1672.5 for VIS/IR).
+
+ VIS:
+
+ pix = [6688, 6688, 6689, 6689]
+ lin = [2744, 8356, 2744, 8356]
+
+ IR1:
+
+ pix = [1672, 1672, 1673, 1673]
+ lin = [686, 2089, 686, 2089]
+ """
+ expectations = {
+ "IR1": {
+ "lons": [[139.680120, 139.718902],
+ [140.307367, 140.346062]],
+ "lats": [[35.045132, 35.045361],
+ [-34.971012, -34.970738]]
+ },
+ "VIS": {
+ "lons": [[139.665133, 139.674833],
+ [140.292579, 140.302249]],
+ "lats": [[35.076113, 35.076170],
+ [-34.940439, -34.940370]]
+ }
+ }
+ exp = expectations[dataset_id["name"]]
+ lons = xr.DataArray(exp["lons"], dims=("y", "x"))
+ lats = xr.DataArray(exp["lats"], dims=("y", "x"))
+ return lons, lats
+
+ @pytest.fixture
+ def dataset_exp(self, dataset_id, ir1_counts_exp, ir1_bt_exp, vis_refl_exp):
+ """Get expected dataset."""
+ ir1_counts_id = make_dataid(name="IR1", calibration="counts", resolution=5000)
+ ir1_bt_id = make_dataid(
+ name="IR1", calibration="brightness_temperature", resolution=5000
+ )
+ vis_refl_id = make_dataid(
+ name="VIS", calibration="reflectance", resolution=1250
+ )
+ expectations = {
+ ir1_counts_id: ir1_counts_exp,
+ ir1_bt_id: ir1_bt_exp,
+ vis_refl_id: vis_refl_exp,
+ }
+ return expectations[dataset_id]
+
+ @pytest.fixture
+ def area_def_exp(self, dataset_id):
+ """Get expected area definition."""
+ if dataset_id["name"] == "IR1":
+ resol = 5
+ size = 2366
+ extent = (-20438.1468, -20438.1468, 20455.4306, 20455.4306)
+ else:
+ resol = 1
+ size = 9464
+ extent = (-20444.6235, -20444.6235, 20448.9445, 20448.9445)
+ area_id = f"gms-5_vissr_western-pacific_{resol}km"
+ desc = f"GMS-5 VISSR Western Pacific area definition with {resol} km resolution"
+ return AreaDefinition(
+ area_id=area_id,
+ description=desc,
+ proj_id=area_id,
+ projection={
+ "a": nav.EARTH_EQUATORIAL_RADIUS,
+ "b": nav.EARTH_POLAR_RADIUS,
+ "h": "123456",
+ "lon_0": "140",
+ "no_defs": "None",
+ "proj": "geos",
+ "type": "crs",
+ "units": "m",
+ "x_0": "0",
+ "y_0": "0",
+ },
+ area_extent=extent,
+ width=size,
+ height=size,
+ )
+
+ @pytest.fixture
+ def attrs_exp(self, area_def_exp):
+ """Get expected dataset attributes."""
+ return {
+ "yaml": "info",
+ "platform": "GMS-5",
+ "sensor": "VISSR",
+ "time_parameters": {
+ "nominal_start_time": dt.datetime(1995, 10, 10),
+ "nominal_end_time": dt.datetime(1995, 10, 10, 0, 25),
+ },
+ "orbital_parameters": {
+ "satellite_nominal_longitude": 140.0,
+ "satellite_nominal_latitude": 0.0,
+ "satellite_nominal_altitude": 123456.0,
+ "satellite_actual_longitude": 141.0,
+ "satellite_actual_latitude": 1.0,
+ "satellite_actual_altitude": 123457.0,
+ },
+ "area_def_uniform_sampling": area_def_exp,
+ }
+
+ def test_get_dataset(self, file_handler, dataset_id, dataset_exp, attrs_exp):
+ """Test getting the dataset."""
+ dataset = file_handler.get_dataset(dataset_id, {"yaml": "info"})
+ xr.testing.assert_allclose(dataset.compute(), dataset_exp, atol=1e-6)
+ assert dataset.attrs == attrs_exp
+
+ def test_time_attributes(self, file_handler, attrs_exp):
+ """Test the file handler's time attributes."""
+ start_time_exp = attrs_exp["time_parameters"]["nominal_start_time"]
+ end_time_exp = attrs_exp["time_parameters"]["nominal_end_time"]
+ assert file_handler.start_time == start_time_exp
+ assert file_handler.end_time == end_time_exp
+
+
+class TestCorruptFile:
+ """Test reading corrupt files."""
+
+ @pytest.fixture
+ def file_contents(self):
+ """Get corrupt file contents (all zero)."""
+ control_block = np.zeros(1, dtype=fmt.CONTROL_BLOCK)
+ image_data = np.zeros(1, dtype=fmt.IMAGE_DATA_BLOCK_IR)
+ return {
+ "control_block": control_block,
+ "image_parameters": {},
+ "image_data": image_data,
+ }
+
+ @pytest.fixture
+ def corrupt_file(self, file_contents, tmp_path):
+ """Write corrupt VISSR file to disk."""
+ filename = tmp_path / "my_vissr_file"
+ writer = VissrFileWriter(ch_type="VIS", open_function=open)
+ writer.write(filename, file_contents)
+ return filename
+
+ def test_corrupt_file(self, corrupt_file):
+ """Test reading a corrupt file."""
+ with pytest.raises(ValueError, match=r".* corrupt .*"):
+ vissr.GMS5VISSRFileHandler(corrupt_file, {}, {})
+
+
+class VissrFileWriter:
+ """Write data in VISSR archive format."""
+
+ image_params_order = [
+ "mode",
+ "coordinate_conversion",
+ "attitude_prediction",
+ "orbit_prediction_1",
+ "orbit_prediction_2",
+ "vis_calibration",
+ "ir1_calibration",
+ "ir2_calibration",
+ "wv_calibration",
+ "simple_coordinate_conversion_table",
+ ]
+
+ def __init__(self, ch_type, open_function):
+ """Initialize the writer.
+
+ Args:
+ ch_type: Channel type (VIS or IR)
+ open_function: Open function to be used (e.g. open or gzip.open)
+ """
+ self.ch_type = ch_type
+ self.open_function = open_function
+
+ def write(self, filename, contents):
+ """Write file contents to disk."""
+ with self.open_function(filename, mode="wb") as fd:
+ self._write_control_block(fd, contents)
+ self._write_image_parameters(fd, contents)
+ self._write_image_data(fd, contents)
+
+ def _write_control_block(self, fd, contents):
+ self._write(fd, contents["control_block"])
+
+ def _write_image_parameters(self, fd, contents):
+ for name in self.image_params_order:
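+ # Image parameter blocks are optional; names missing from the
+ # contents dict are skipped (the corrupt-file test above passes an
+ # empty "image_parameters" dict).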
+ im_param = contents["image_parameters"].get(name)
+ if im_param:
+ self._write_image_parameter(fd, im_param, name)
+
+ def _write_image_parameter(self, fd, im_param, name):
+ offset = fmt.IMAGE_PARAMS[name]["offset"][self.ch_type]
+ self._write(fd, im_param, offset)
+
+ def _write_image_data(self, fd, contents):
+ offset = fmt.IMAGE_DATA[self.ch_type]["offset"]
+ self._write(fd, contents["image_data"], offset)
+
+ def _write(self, fd, data, offset=None):
+ """Write data to file.
+
+ If an offset is given, first pad with placeholder bytes up to that offset.
+ """
+ if offset:
+ self._fill(fd, offset)
+ fd.write(data.tobytes())
+
+ def _fill(self, fd, target_byte):
+ """Write placeholders from current position to target byte."""
+ nbytes = target_byte - fd.tell()
+ fd.write(b" " * nbytes)
diff --git a/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py
new file mode 100644
index 0000000000..47c5fd044c
--- /dev/null
+++ b/satpy/tests/reader_tests/gms/test_gms5_vissr_navigation.py
@@ -0,0 +1,567 @@
+"""Unit tests for GMS-5 VISSR navigation."""
+
+import numpy as np
+import pytest
+
+import satpy.readers.gms.gms5_vissr_navigation as nav
+from satpy.tests.reader_tests.utils import get_jit_methods
+
+# Navigation references computed with JMA's Msial library (files
+# VISSR_19960217_2331_IR1.A.IMG and VISSR_19960217_2331_VIS.A.IMG). The VIS
+# navigation is slightly off (< 0.01 deg) compared to JMA's reference.
+# This is probably due to precision problems with the copied numbers.
+IR_NAVIGATION_REFERENCE = [
+ {
+ "pixel": nav.Pixel(line=686, pixel=1680),
+ 'lon': 139.990380,
+ 'lat': 35.047056,
+ 'nav_params': nav.PixelNavigationParameters(
+ attitude=nav.Attitude(
+ angle_between_earth_and_sun=3.997397917902958,
+ angle_between_sat_spin_and_z_axis=3.149118633034304,
+ angle_between_sat_spin_and_yz_plane=0.000546042025980,
+ ),
+ orbit=nav.Orbit(
+ angles=nav.OrbitAngles(
+ greenwich_sidereal_time=2.468529732418296,
+ declination_from_sat_to_sun=-0.208770861178982,
+ right_ascension_from_sat_to_sun=3.304369303579407,
+ ),
+ sat_position=nav.Vector3D(
+ x=-32390963.148471601307392,
+ y=27003395.381247851997614,
+ z=-228134.860026293463307,
+ ),
+ nutation_precession=np.array(
+ [[0.999936381496146, -0.010344758016410, -0.004496547784299],
+ [0.010344942303489, 0.999946489495557, 0.000017727054455],
+ [0.004496123789670, -0.000064242454080, 0.999989890320785]]
+ ),
+ ),
+ proj_params=nav.ProjectionParameters(
+ image_offset=nav.ImageOffset(
+ line_offset=1378.5,
+ pixel_offset=1672.5,
+ ),
+ scanning_angles=nav.ScanningAngles(
+ stepping_angle=0.000140000047395,
+ sampling_angle=0.000095719995443,
+ misalignment=np.array(
+ [[0.999999165534973, 0.000510364072397, 0.001214201096445],
+ [-0.000511951977387, 0.999999046325684, 0.001307720085606],
+ [-0.001213532872498, -0.001308340579271, 0.999998450279236]]
+ )
+ ),
+ earth_ellipsoid=nav.EarthEllipsoid(
+ flattening=0.003352813177897,
+ equatorial_radius=6378136.0
+ )
+ ),
+ )
+ },
+ {
+ "pixel": nav.Pixel(line=2089, pixel=1793),
+ 'lon': 144.996967,
+ 'lat': -34.959853,
+ 'nav_params': nav.PixelNavigationParameters(
+ attitude=nav.Attitude(
+ angle_between_earth_and_sun=3.935707944355762,
+ angle_between_sat_spin_and_z_axis=3.149118633034304,
+ angle_between_sat_spin_and_yz_plane=0.000546042025980,
+ ),
+ orbit=nav.Orbit(
+ angles=nav.OrbitAngles(
+ greenwich_sidereal_time=2.530392320846865,
+ declination_from_sat_to_sun=-0.208713576872247,
+ right_ascension_from_sat_to_sun=3.242660398458377,
+ ),
+ sat_position=nav.Vector3D(
+ x=-32390273.633551981300116,
+ y=27003859.543135114014149,
+ z=-210800.087589388160268,
+ ),
+ nutation_precession=np.array(
+ [[0.999936381432029, -0.010344763228876, -0.004496550050695],
+ [0.010344947502662, 0.999946489441823, 0.000017724053657],
+ [0.004496126086653, -0.000064239500295, 0.999989890310647]]
+ ),
+ ),
+ proj_params=nav.ProjectionParameters(
+ image_offset=nav.ImageOffset(
+ line_offset=1378.5,
+ pixel_offset=1672.5,
+ ),
+ scanning_angles=nav.ScanningAngles(
+ stepping_angle=0.000140000047395,
+ sampling_angle=0.000095719995443,
+ misalignment=np.array(
+ [[0.999999165534973, 0.000510364072397, 0.001214201096445],
+ [-0.000511951977387, 0.999999046325684, 0.001307720085606],
+ [-0.001213532872498, -0.001308340579271, 0.999998450279236]]
+ ),
+ ),
+ earth_ellipsoid=nav.EarthEllipsoid(
+ flattening=0.003352813177897,
+ equatorial_radius=6378136
+ )
+ ),
+ )
+ }
+]
+
+
+VIS_NAVIGATION_REFERENCE = [
+ {
+ "pixel": nav.Pixel(line=2744, pixel=6720),
+ 'lon': 139.975527,
+ 'lat': 35.078028,
+ 'nav_params': nav.PixelNavigationParameters(
+ attitude=nav.Attitude(
+ angle_between_earth_and_sun=3.997397918405798,
+ angle_between_sat_spin_and_z_axis=3.149118633034304,
+ angle_between_sat_spin_and_yz_plane=0.000546042025980,
+ ),
+ orbit=nav.Orbit(
+ angles=nav.OrbitAngles(
+ greenwich_sidereal_time=2.468529731914041,
+ declination_from_sat_to_sun=-0.208770861179448,
+ right_ascension_from_sat_to_sun=3.304369304082406,
+ ),
+ sat_position=nav.Vector3D(
+ x=-32390963.148477241396904,
+ y=27003395.381243918091059,
+ z=-228134.860164520738181,
+ ),
+ nutation_precession=np.array(
+ [[0.999936381496146, -0.010344758016410, -0.004496547784299],
+ [0.010344942303489, 0.999946489495557, 0.000017727054455],
+ [0.004496123789670, -0.000064242454080, 0.999989890320785]]
+ ),
+ ),
+ proj_params=nav.ProjectionParameters(
+ image_offset=nav.ImageOffset(
+ line_offset=5513.0,
+ pixel_offset=6688.5,
+ ),
+ scanning_angles=nav.ScanningAngles(
+ stepping_angle=0.000035000004573,
+ sampling_angle=0.000023929998861,
+ misalignment=np.array(
+ [[0.999999165534973, 0.000510364072397, 0.001214201096445],
+ [-0.000511951977387, 0.999999046325684, 0.001307720085606],
+ [-0.001213532872498, -0.001308340579271, 0.999998450279236]]
+ ),
+ ),
+ earth_ellipsoid=nav.EarthEllipsoid(
+ flattening=0.003352813177897,
+ equatorial_radius=6378136
+ )
+ ),
+ )
+ },
+ {
+ "pixel": nav.Pixel(line=8356, pixel=7172),
+ 'lon': 144.980104,
+ 'lat': -34.929123,
+ 'nav_params': nav.PixelNavigationParameters(
+ attitude=nav.Attitude(
+ angle_between_earth_and_sun=3.935707944858620,
+ angle_between_sat_spin_and_z_axis=3.149118633034304,
+ angle_between_sat_spin_and_yz_plane=0.000546042025980,
+ ),
+ orbit=nav.Orbit(
+ angles=nav.OrbitAngles(
+ greenwich_sidereal_time=2.530392320342610,
+ declination_from_sat_to_sun=-0.208713576872715,
+ right_ascension_from_sat_to_sun=3.242660398961383,
+ ),
+ sat_position=nav.Vector3D(
+ x=-32390273.633557569235563,
+ y=27003859.543131537735462,
+ z=-210800.087734811415430,
+ ),
+ nutation_precession=np.array(
+ [[0.999936381432029, -0.010344763228876, -0.004496550050695],
+ [0.010344947502662, 0.999946489441823, 0.000017724053657],
+ [0.004496126086653, -0.000064239500295, 0.999989890310647]]
+ ),
+ ),
+ proj_params=nav.ProjectionParameters(
+ image_offset=nav.ImageOffset(
+ line_offset=5513.0,
+ pixel_offset=6688.5,
+ ),
+ scanning_angles=nav.ScanningAngles(
+ stepping_angle=0.000035000004573,
+ sampling_angle=0.000023929998861,
+ misalignment=np.array(
+ [[0.999999165534973, 0.000510364072397, 0.001214201096445],
+ [-0.000511951977387, 0.999999046325684, 0.001307720085606],
+ [-0.001213532872498, -0.001308340579271, 0.999998450279236]]
+ ),
+ ),
+ earth_ellipsoid=nav.EarthEllipsoid(
+ flattening=0.003352813177897,
+ equatorial_radius=6378136
+ )
+ ),
+ )
+ },
+]
+
+NAVIGATION_REFERENCE = VIS_NAVIGATION_REFERENCE + IR_NAVIGATION_REFERENCE
+
+
+@pytest.fixture(params=[False, True], autouse=True)
+def disable_jit(request, monkeypatch):
+ """Run tests with jit enabled and disabled.
+
+ Reason: the coverage report is only accurate with jit disabled.
+ """
+ if request.param:
+ jit_methods = get_jit_methods(nav)
+ for name, method in jit_methods.items():
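+ # Passing a dotted-path string such as
+ # "satpy.readers.gms.gms5_vissr_navigation.get_lon_lat" makes
+ # monkeypatch.setattr resolve the target attribute itself; swapping in
+ # the uncompiled ``py_func`` lets coverage see the pure-Python source.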
+ monkeypatch.setattr(name, method.py_func)
+
+
+class TestSinglePixelNavigation:
+ """Test navigation of a single pixel."""
+
+ @pytest.mark.parametrize(
+ "point,nav_params,expected",
+ [
+ (ref["pixel"], ref["nav_params"], (ref["lon"], ref["lat"]))
+ for ref in NAVIGATION_REFERENCE
+ ],
+ )
+ def test_get_lon_lat(self, point, nav_params, expected):
+ """Test getting lon/lat coordinates for a given pixel."""
+ lon, lat = nav.get_lon_lat(point, nav_params)
+ np.testing.assert_allclose((lon, lat), expected)
+
+ def test_transform_image_coords_to_scanning_angles(self):
+ """Test transformation from image coordinates to scanning angles."""
+ offset = nav.ImageOffset(line_offset=100, pixel_offset=200)
+ scanning_angles = nav.ScanningAngles(
+ stepping_angle=0.01, sampling_angle=0.02, misalignment=-999
+ )
+ angles = nav.transform_image_coords_to_scanning_angles(
+ point=nav.Pixel(199, 99),
+ image_offset=offset,
+ scanning_angles=scanning_angles,
+ )
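+ # A hedged reading of the reference values: they are consistent with
+ # one-based line/pixel numbering, i.e.
+ # sampling_angle * (99 + 1 - 200) = -2 (pixel axis) and
+ # stepping_angle * (199 + 1 - 100) = 1 (line axis).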
+ np.testing.assert_allclose(angles, [-2, 1])
+
+ def test_transform_scanning_angles_to_satellite_coords(self):
+ """Test transformation from scanning angles to satellite coordinates."""
+ scanning_angles = nav.Vector2D(np.pi, np.pi / 2)
+ misalignment = np.diag([1, 2, 3]).astype(float)
+ point_sat = nav.transform_scanning_angles_to_satellite_coords(
+ scanning_angles, misalignment
+ )
+ np.testing.assert_allclose(point_sat, [0, 0, 3], atol=1e-12)
+
+ def test_transform_satellite_to_earth_fixed_coords(self):
+ """Test transformation from satellite to earth-fixed coordinates."""
+ point_sat = nav.Vector3D(1, 2, 3)
+ attitude = nav.Attitude(
+ angle_between_earth_and_sun=np.pi,
+ angle_between_sat_spin_and_z_axis=np.pi,
+ angle_between_sat_spin_and_yz_plane=np.pi / 2,
+ )
+ orbit = nav.Orbit(
+ angles=nav.OrbitAngles(
+ greenwich_sidereal_time=np.pi,
+ declination_from_sat_to_sun=np.pi,
+ right_ascension_from_sat_to_sun=np.pi / 2,
+ ),
+ sat_position=nav.Vector3D(-999, -999, -999),
+ nutation_precession=np.diag([1, 2, 3]).astype(float),
+ )
+ res = nav.transform_satellite_to_earth_fixed_coords(point_sat, orbit, attitude)
+ np.testing.assert_allclose(res, [-3, 1, -2])
+
+ def test_intersect_view_vector_with_earth(self):
+ """Test intersection of a view vector with the earth's surface."""
+ view_vector = nav.Vector3D(-1, 0, 0)
+ ellipsoid = nav.EarthEllipsoid(equatorial_radius=6371 * 1000, flattening=0.003)
+ sat_pos = nav.Vector3D(x=36000 * 1000.0, y=0.0, z=0.0)
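+ # The satellite sits on the +x axis looking back along -x, so the
+ # first intersection with the ellipsoid is the sub-satellite point
+ # (equatorial_radius, 0, 0).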
+ point = nav.intersect_with_earth(view_vector, sat_pos, ellipsoid)
+ exp = [ellipsoid.equatorial_radius, 0, 0]
+ np.testing.assert_allclose(point, exp)
+
+ @pytest.mark.parametrize(
+ "point_earth_fixed,point_geodetic_exp",
+ [
+ ([0, 0, 1], [0, 90]),
+ ([0, 0, -1], [0, -90]),
+ ([1, 0, 0], [0, 0]),
+ ([-1, 0, 0], [180, 0]),
+ ([1, 1, 1], [45, 35.426852]),
+ ],
+ )
+ def test_transform_earth_fixed_to_geodetic_coords(
+ self, point_earth_fixed, point_geodetic_exp
+ ):
+ """Test transformation from earth-fixed to geodetic coordinates."""
+ point_geodetic = nav.transform_earth_fixed_to_geodetic_coords(
+ nav.Vector3D(*point_earth_fixed),
+ 0.003
+ )
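+ # Sanity check for the (1, 1, 1) case: the geocentric latitude is
+ # arctan(1 / sqrt(2)) = 35.264 deg, and converting to geodetic latitude
+ # via tan(lat) = tan(lat_c) / (1 - f)**2 with f = 0.003 reproduces the
+ # expected 35.426852 deg.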
+ np.testing.assert_allclose(point_geodetic, point_geodetic_exp)
+
+ def test_normalize_vector(self):
+ """Test vector normalization."""
+ v = nav.Vector3D(1, 2, 3)
+ norm = np.sqrt(14)
+ exp = nav.Vector3D(1 / norm, 2 / norm, 3 / norm)
+ normed = nav.normalize_vector(v)
+ np.testing.assert_allclose(normed, exp)
+
+
+class TestImageNavigation:
+ """Test navigation of an entire image."""
+
+ @pytest.fixture
+ def expected(self):
+ """Get expected coordinates."""
+ exp = {
+ "lon": [[-114.56923, -112.096837, -109.559702],
+ [8.33221, 8.793893, 9.22339],
+ [15.918476, 16.268354, 16.6332]],
+ "lat": [[-23.078721, -24.629845, -26.133314],
+ [-42.513409, -39.790231, -37.06392],
+ [3.342834, 6.07043, 8.795932]]
+ }
+ return exp
+
+ def test_get_lons_lats(self, navigation_params, expected):
+ """Test getting lon/lat coordinates."""
+ lons, lats = nav.get_lons_lats(
+ lines=np.array([1000, 1500, 2000]),
+ pixels=np.array([1000, 1500, 2000]),
+ nav_params=navigation_params,
+ )
+ np.testing.assert_allclose(lons, expected["lon"])
+ np.testing.assert_allclose(lats, expected["lat"])
+
+
+class TestPredictionInterpolation:
+ """Test interpolation of orbit and attitude predictions."""
+
+ @pytest.mark.parametrize(
+ "obs_time,expected", [(-1, np.nan), (1.5, 2.5), (5, np.nan)]
+ )
+ def test_interpolate_continuous(self, obs_time, expected):
+ """Test interpolation of continuous variables."""
+ prediction_times = np.array([0, 1, 2, 3])
+ predicted_values = np.array([1, 2, 3, 4])
+ res = nav.interpolate_continuous(obs_time, prediction_times, predicted_values)
+ np.testing.assert_allclose(res, expected)
+
+ @pytest.mark.parametrize(
+ "obs_time,expected",
+ [
+ (-1, np.nan),
+ (1.5, 0.75 * np.pi),
+ (2.5, -0.75 * np.pi),
+ (3.5, -0.25 * np.pi),
+ (5, np.nan),
+ ],
+ )
+ def test_interpolate_angles(self, obs_time, expected):
+ """Test interpolation of periodic angles."""
+ prediction_times = np.array([0, 1, 2, 3, 4])
+ predicted_angles = np.array(
+ [0, 0.5 * np.pi, np.pi, 1.5 * np.pi, 2 * np.pi]
+ ) # already unwrapped
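+ # Example: obs_time=2.5 interpolates linearly to 1.25 * pi, which the
+ # expected values suggest is wrapped back into (-pi, pi], giving
+ # -0.75 * pi.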
+ res = nav.interpolate_angles(obs_time, prediction_times, predicted_angles)
+ np.testing.assert_allclose(res, expected)
+
+ @pytest.mark.parametrize(
+ "obs_time,expected",
+ [
+ (-1, np.nan * np.ones((2, 2))),
+ (1.5, [[1, 0], [0, 2]]),
+ (3, np.nan * np.ones((2, 2))),
+ ],
+ )
+ def test_interpolate_nearest(self, obs_time, expected):
+ """Test nearest neighbour interpolation."""
+ prediction_times = np.array([0, 1, 2])
+ predicted_angles = np.array(
+ [np.zeros((2, 2)), np.diag((1, 2)), np.zeros((2, 2))]
+ )
+ res = nav.interpolate_nearest(obs_time, prediction_times, predicted_angles)
+ np.testing.assert_allclose(res, expected)
+
+ def test_interpolate_orbit_prediction(
+ self, obs_time, orbit_prediction, orbit_expected
+ ):
+ """Test interpolating orbit prediction."""
+ orbit_prediction = orbit_prediction.to_numba()
+ orbit = nav.interpolate_orbit_prediction(orbit_prediction, obs_time)
+ _assert_namedtuple_close(orbit, orbit_expected)
+
+ def test_interpolate_attitude_prediction(
+ self, obs_time, attitude_prediction, attitude_expected
+ ):
+ """Test interpolating attitude prediction."""
+ attitude_prediction = attitude_prediction.to_numba()
+ attitude = nav.interpolate_attitude_prediction(attitude_prediction, obs_time)
+ _assert_namedtuple_close(attitude, attitude_expected)
+
+ @pytest.fixture
+ def obs_time(self):
+ """Get observation time."""
+ return 2.5
+
+ @pytest.fixture
+ def orbit_expected(self):
+ """Get expected orbit."""
+ return nav.Orbit(
+ angles=nav.OrbitAngles(
+ greenwich_sidereal_time=1.5,
+ declination_from_sat_to_sun=1.6,
+ right_ascension_from_sat_to_sun=1.7,
+ ),
+ sat_position=nav.Vector3D(
+ x=1.8,
+ y=1.9,
+ z=2.0,
+ ),
+ nutation_precession=1.6 * np.identity(3),
+ )
+
+ @pytest.fixture
+ def attitude_expected(self):
+ """Get expected attitude."""
+ return nav.Attitude(
+ angle_between_earth_and_sun=1.5,
+ angle_between_sat_spin_and_z_axis=1.6,
+ angle_between_sat_spin_and_yz_plane=1.7,
+ )
+
+
+@pytest.fixture
+def sampling_angle():
+ """Get sampling angle."""
+ return 0.000095719995443
+
+
+@pytest.fixture
+def scan_params(sampling_angle):
+ """Get scanning parameters."""
+ return nav.ScanningParameters(
+ start_time_of_scan=0,
+ spinning_rate=0.5,
+ num_sensors=1,
+ sampling_angle=sampling_angle,
+ )
+
+
+@pytest.fixture
+def attitude_prediction():
+ """Get attitude prediction."""
+ return nav.AttitudePrediction(
+ prediction_times=np.array([1.0, 2.0, 3.0]),
+ attitude=nav.Attitude(
+ angle_between_earth_and_sun=np.array([0.0, 1.0, 2.0]),
+ angle_between_sat_spin_and_z_axis=np.array([0.1, 1.1, 2.1]),
+ angle_between_sat_spin_and_yz_plane=np.array([0.2, 1.2, 2.2]),
+ ),
+ )
+
+
+@pytest.fixture
+def orbit_prediction():
+ """Get orbit prediction."""
+ return nav.OrbitPrediction(
+ prediction_times=np.array([1.0, 2.0, 3.0, 4.0]),
+ angles=nav.OrbitAngles(
+ greenwich_sidereal_time=np.array([0.0, 1.0, 2.0, 3.0]),
+ declination_from_sat_to_sun=np.array([0.1, 1.1, 2.1, 3.1]),
+ right_ascension_from_sat_to_sun=np.array([0.2, 1.2, 2.2, 3.2]),
+ ),
+ sat_position=nav.Vector3D(
+ x=np.array([0.3, 1.3, 2.3, 3.3]),
+ y=np.array([0.4, 1.4, 2.4, 3.4]),
+ z=np.array([0.5, 1.5, 2.5, 3.5]),
+ ),
+ nutation_precession=np.array(
+ [
+ 0.6 * np.identity(3),
+ 1.6 * np.identity(3),
+ 2.6 * np.identity(3),
+ 3.6 * np.identity(3),
+ ]
+ ),
+ )
+
+
+@pytest.fixture
+def proj_params(sampling_angle):
+ """Get projection parameters."""
+ return nav.ProjectionParameters(
+ image_offset=nav.ImageOffset(
+ line_offset=1378.5,
+ pixel_offset=1672.5,
+ ),
+ scanning_angles=nav.ScanningAngles(
+ stepping_angle=0.000140000047395,
+ sampling_angle=sampling_angle,
+ misalignment=np.identity(3).astype(np.float64),
+ ),
+ earth_ellipsoid=nav.EarthEllipsoid(
+ flattening=0.003352813177897,
+ equatorial_radius=6378136,
+ ),
+ )
+
+
+@pytest.fixture
+def static_nav_params(proj_params, scan_params):
+ """Get static navigation parameters."""
+ return nav.StaticNavigationParameters(proj_params, scan_params)
+
+
+@pytest.fixture
+def predicted_nav_params(attitude_prediction, orbit_prediction):
+ """Get predicted navigation parameters."""
+ return nav.PredictedNavigationParameters(attitude_prediction, orbit_prediction)
+
+
+@pytest.fixture
+def navigation_params(static_nav_params, predicted_nav_params):
+ """Get image navigation parameters."""
+ return nav.ImageNavigationParameters(static_nav_params, predicted_nav_params)
+
+
+def test_get_observation_time():
+ """Test getting a pixel's observation time."""
+ scan_params = nav.ScanningParameters(
+ start_time_of_scan=50000.0,
+ spinning_rate=100,
+ num_sensors=1,
+ sampling_angle=0.01,
+ )
+ pixel = nav.Pixel(11, 100)
+ obs_time = nav.get_observation_time(pixel, scan_params)
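+ # The expected value is consistent with MJD-based timing: at
+ # spinning_rate revolutions per minute the spin frequency is
+ # 1440 * spinning_rate = 144000 revolutions per day, and
+ # (line - 1 + sampling_angle * pixel / (2 * pi)) / 144000
+ # = (10 + 0.159155) / 144000 = 7.0549687e-5 days after scan start.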
+ np.testing.assert_allclose(obs_time, 50000.0000705496871047)
+
+
+def _assert_namedtuple_close(a, b):
+ cls_name = b.__class__.__name__
+ assert a.__class__ == b.__class__
+ for attr in b._fields:
+ a_attr = getattr(a, attr)
+ b_attr = getattr(b, attr)
+ if _is_namedtuple(b_attr):
+ # Recurse into nested namedtuples; feeding them to assert_allclose
+ # directly can fail for heterogeneous fields.
+ _assert_namedtuple_close(a_attr, b_attr)
+ else:
+ np.testing.assert_allclose(
+ a_attr, b_attr, err_msg=f"{cls_name} attribute {attr} differs"
+ )
+
+
+def _is_namedtuple(obj):
+ return hasattr(obj, "_fields")
diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py
index b8ad4400cb..344f918f8f 100644
--- a/satpy/tests/reader_tests/test_abi_l1b.py
+++ b/satpy/tests/reader_tests/test_abi_l1b.py
@@ -27,93 +27,105 @@
from satpy.tests.utils import make_dataid
+def _create_fake_rad_dataarray(rad=None):
+ x_image = xr.DataArray(0.)
+ y_image = xr.DataArray(0.)
+ time = xr.DataArray(0.)
+ if rad is None:
+ rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50.
+ rad_data = (rad_data + 1.) / 0.5
+ rad_data = rad_data.astype(np.int16)
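+ # Radiances are encoded CF-style, so raw * scale_factor + add_offset
+ # recovers the originals; the last encoded count, (500 + 1) / 0.5 =
+ # 1002, deliberately equals the _FillValue below and thus decodes to
+ # NaN in the calibrated results.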
+ rad = xr.DataArray(
+ rad_data,
+ dims=('y', 'x'),
+ attrs={
+ 'scale_factor': 0.5,
+ 'add_offset': -1.,
+ '_FillValue': 1002,
+ 'units': 'W m-2 um-1 sr-1',
+ 'valid_range': (0, 4095),
+ }
+ )
+ rad.coords['t'] = time
+ rad.coords['x_image'] = x_image
+ rad.coords['y_image'] = y_image
+ return rad
+
+
+def _create_fake_rad_dataset(rad=None):
+ rad = _create_fake_rad_dataarray(rad=rad)
+
+ x__ = xr.DataArray(
+ range(5),
+ attrs={'scale_factor': 2., 'add_offset': -1.},
+ dims=('x',)
+ )
+ y__ = xr.DataArray(
+ range(2),
+ attrs={'scale_factor': -2., 'add_offset': 1.},
+ dims=('y',)
+ )
+ proj = xr.DataArray(
+ [],
+ attrs={
+ 'semi_major_axis': 1.,
+ 'semi_minor_axis': 1.,
+ 'perspective_point_height': 1.,
+ 'longitude_of_projection_origin': -90.,
+ 'latitude_of_projection_origin': 0.,
+ 'sweep_angle_axis': u'x'
+ }
+ )
+
+ fake_dataset = xr.Dataset(
+ data_vars={
+ 'Rad': rad,
+ 'band_id': np.array(8),
+ # 'x': x__,
+ # 'y': y__,
+ 'x_image': xr.DataArray(0.),
+ 'y_image': xr.DataArray(0.),
+ 'goes_imager_projection': proj,
+ 'yaw_flip_flag': np.array([1]),
+ "planck_fk1": np.array(13432.1),
+ "planck_fk2": np.array(1497.61),
+ "planck_bc1": np.array(0.09102),
+ "planck_bc2": np.array(0.99971),
+ "esun": np.array(2017),
+ "nominal_satellite_subpoint_lat": np.array(0.0),
+ "nominal_satellite_subpoint_lon": np.array(-89.5),
+ "nominal_satellite_height": np.array(35786.02),
+ "earth_sun_distance_anomaly_in_AU": np.array(0.99)
+ },
+ coords={
+ 't': rad.coords['t'],
+ 'x': x__,
+ 'y': y__,
+
+ },
+ attrs={
+ "time_coverage_start": "2017-09-20T17:30:40.8Z",
+ "time_coverage_end": "2017-09-20T17:41:17.5Z",
+ },
+ )
+ return fake_dataset
+
+
class Test_NC_ABI_L1B_Base(unittest.TestCase):
"""Common setup for NC_ABI_L1B tests."""
@mock.patch('satpy.readers.abi_base.xr')
- def setUp(self, xr_, rad=None):
+ def setUp(self, xr_, rad=None, clip_negative_radiances=False):
"""Create a fake dataset using the given radiance data."""
from satpy.readers.abi_l1b import NC_ABI_L1B
- x_image = xr.DataArray(0.)
- y_image = xr.DataArray(0.)
- time = xr.DataArray(0.)
- if rad is None:
- rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50.
- rad_data = (rad_data + 1.) / 0.5
- rad_data = rad_data.astype(np.int16)
- rad = xr.DataArray(
- rad_data,
- dims=('y', 'x'),
- attrs={
- 'scale_factor': 0.5,
- 'add_offset': -1.,
- '_FillValue': 1002,
- 'units': 'W m-2 um-1 sr-1',
- 'valid_range': (0, 4095),
- }
- )
- rad.coords['t'] = time
- rad.coords['x_image'] = x_image
- rad.coords['y_image'] = y_image
- x__ = xr.DataArray(
- range(5),
- attrs={'scale_factor': 2., 'add_offset': -1.},
- dims=('x',)
- )
- y__ = xr.DataArray(
- range(2),
- attrs={'scale_factor': -2., 'add_offset': 1.},
- dims=('y',)
- )
- proj = xr.DataArray(
- [],
- attrs={
- 'semi_major_axis': 1.,
- 'semi_minor_axis': 1.,
- 'perspective_point_height': 1.,
- 'longitude_of_projection_origin': -90.,
- 'latitude_of_projection_origin': 0.,
- 'sweep_angle_axis': u'x'
- }
- )
- fake_dataset = xr.Dataset(
- data_vars={
- 'Rad': rad,
- 'band_id': np.array(8),
- # 'x': x__,
- # 'y': y__,
- 'x_image': x_image,
- 'y_image': y_image,
- 'goes_imager_projection': proj,
- 'yaw_flip_flag': np.array([1]),
- "planck_fk1": np.array(13432.1),
- "planck_fk2": np.array(1497.61),
- "planck_bc1": np.array(0.09102),
- "planck_bc2": np.array(0.99971),
- "esun": np.array(2017),
- "nominal_satellite_subpoint_lat": np.array(0.0),
- "nominal_satellite_subpoint_lon": np.array(-89.5),
- "nominal_satellite_height": np.array(35786.02),
- "earth_sun_distance_anomaly_in_AU": np.array(0.99)
- },
- coords={
- 't': rad.coords['t'],
- 'x': x__,
- 'y': y__,
-
- },
- attrs={
- "time_coverage_start": "2017-09-20T17:30:40.8Z",
- "time_coverage_end": "2017-09-20T17:41:17.5Z",
- },
- )
- xr_.open_dataset.return_value = fake_dataset
+ xr_.open_dataset.return_value = _create_fake_rad_dataset(rad=rad)
self.reader = NC_ABI_L1B('filename',
{'platform_shortname': 'G16', 'observation_type': 'Rad',
'suffix': 'custom',
'scene_abbr': 'C', 'scan_mode': 'M3'},
- {'filetype': 'info'})
+ {'filetype': 'info'},
+ clip_negative_radiances=clip_negative_radiances)
class TestABIYAML:
@@ -200,7 +212,7 @@ def test_get_area_def(self, adef):
class Test_NC_ABI_L1B_ir_cal(Test_NC_ABI_L1B_Base):
- """Test the NC_ABI_L1B reader's IR calibration."""
+ """Test the NC_ABI_L1B reader's default IR calibration."""
def setUp(self):
"""Create fake data for the tests."""
@@ -213,19 +225,16 @@ def setUp(self):
attrs={
'scale_factor': 0.5,
'add_offset': -1.,
- '_FillValue': 1002,
+ '_FillValue': 1002, # last rad_data value
}
)
super(Test_NC_ABI_L1B_ir_cal, self).setUp(rad=rad)
- def test_ir_calibrate(self):
- """Test IR calibration."""
+ def test_ir_calibration_attrs(self):
+ """Test IR calibrated DataArray attributes."""
res = self.reader.get_dataset(
make_dataid(name='C05', calibration='brightness_temperature'), {})
- expected = np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115],
- [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]])
- self.assertTrue(np.allclose(res.data, expected, equal_nan=True))
# make sure the attributes from the file are in the data array
self.assertNotIn('scale_factor', res.attrs)
self.assertNotIn('_FillValue', res.attrs)
@@ -233,6 +242,68 @@ def test_ir_calibrate(self):
'toa_brightness_temperature')
self.assertEqual(res.attrs['long_name'], 'Brightness Temperature')
+ def test_clip_negative_radiances_attribute(self):
+ """Assert that clip_negative_radiances is set to False."""
+ assert not self.reader.clip_negative_radiances
+
+ def test_ir_calibrate(self):
+ """Test IR calibration."""
+ res = self.reader.get_dataset(
+ make_dataid(name='C05', calibration='brightness_temperature'), {})
+
+ expected = np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115],
+ [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]])
+ assert np.allclose(res.data, expected, equal_nan=True)
+
+
+class Test_NC_ABI_L1B_clipped_ir_cal(Test_NC_ABI_L1B_Base):
+ """Test the NC_ABI_L1B reader's IR calibration (clipping negative radiance)."""
+
+ def setUp(self):
+ """Create fake data for the tests."""
+ values = np.arange(10.)
+ values[0] = -0.0001 # introduce a value below the minimum expected radiance
+ rad_data = (values.reshape((2, 5)) + 1.) * 50.
+ rad_data = (rad_data + 1.) / 0.5
+ rad_data = rad_data.astype(np.int16)
+ rad = xr.DataArray(
+ rad_data,
+ dims=('y', 'x'),
+ attrs={
+ 'scale_factor': 0.5,
+ 'add_offset': -1.,
+ '_FillValue': 1002,
+ }
+ )
+
+ super().setUp(rad=rad, clip_negative_radiances=True)
+
+ def test_clip_negative_radiances_attribute(self):
+ """Assert that clip_negative_radiances has been set to True."""
+ assert self.reader.clip_negative_radiances
+
+ def test_ir_calibrate(self):
+ """Test IR calibration."""
+ res = self.reader.get_dataset(
+ make_dataid(name='C07', calibration='brightness_temperature'), {})
+
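+ # The first pixel's radiance is slightly negative, so with
+ # clip_negative_radiances=True it is clipped to the minimum valid
+ # radiance and yields a finite brightness temperature instead of NaN.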
+ clipped_ir = 267.07775531
+ expected = np.array([[clipped_ir, 305.15576503, 332.37383249, 354.73895301, 374.19710115],
+ [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]])
+ assert np.allclose(res.data, expected, equal_nan=True)
+
+ def test_get_minimum_radiance(self):
+ """Test get_minimum_radiance from Rad DataArray."""
+ from satpy.readers.abi_l1b import NC_ABI_L1B
+ data = xr.DataArray(
+ attrs={
+ 'scale_factor': 0.5,
+ 'add_offset': -1.,
+ '_FillValue': 1002,
+ }
+ )
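+ # The expected 0.0 is consistent with the smallest count whose decoded
+ # radiance is non-negative: 2 * 0.5 - 1 = 0.0. The class is passed in
+ # place of ``self`` to avoid constructing a full file handler.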
+ np.testing.assert_allclose(NC_ABI_L1B._get_minimum_radiance(NC_ABI_L1B, data), 0.0)
+
class Test_NC_ABI_L1B_vis_cal(Test_NC_ABI_L1B_Base):
"""Test the NC_ABI_L1B reader."""
diff --git a/satpy/tests/reader_tests/test_geocat.py b/satpy/tests/reader_tests/test_geocat.py
index b9323a39e2..91de6a4265 100644
--- a/satpy/tests/reader_tests/test_geocat.py
+++ b/satpy/tests/reader_tests/test_geocat.py
@@ -130,10 +130,22 @@ def test_init(self):
loadables = r.select_files_from_pathnames([
'geocatL2.GOES-13.2015143.234500.nc',
])
- self.assertEqual(len(loadables), 1)
+ assert len(loadables) == 1
r.create_filehandlers(loadables)
# make sure we have some files
- self.assertTrue(r.file_handlers)
+ assert r.file_handlers
+
+ def test_init_with_kwargs(self):
+ """Test basic init with extra parameters."""
+ from satpy.readers import load_reader
+ r = load_reader(self.reader_configs, xarray_kwargs={"decode_times": True})
+ loadables = r.select_files_from_pathnames([
+ 'geocatL2.GOES-13.2015143.234500.nc',
+ ])
+ assert len(loadables) == 1
+ r.create_filehandlers(loadables, fh_kwargs={"xarray_kwargs": {'decode_times': True}})
+ # make sure we have some files
+ assert r.file_handlers
def test_load_all_old_goes(self):
"""Test loading all test datasets from old GOES files."""
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index a7de60dcc2..aaf8dc07e2 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -1423,6 +1423,14 @@ def test_header_warning():
with pytest.warns(UserWarning, match=exp_warning):
NativeMSGFileHandler('myfile', {}, None)
+ # check that the code does not crash when the Main Product Header is missing
+ header_missing = header_good.copy()
+ header_missing.pop('15_MAIN_PRODUCT_HEADER')
+ fromfile.return_value = header_missing
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ NativeMSGFileHandler('myfile', {}, None)
+
@pytest.mark.parametrize(
"starts_with, expected",
diff --git a/satpy/tests/reader_tests/utils.py b/satpy/tests/reader_tests/utils.py
index dd5b09c86a..9415ac56ec 100644
--- a/satpy/tests/reader_tests/utils.py
+++ b/satpy/tests/reader_tests/utils.py
@@ -17,6 +17,8 @@
# satpy. If not, see .
"""Utilities for reader tests."""
+import inspect
+
def default_attr_processor(root, attr):
"""Do not change the attribute."""
@@ -43,3 +45,19 @@ def fill_h5(root, contents, attr_processor=default_attr_processor):
if "attrs" in val:
for attr_name, attr_val in val["attrs"].items():
root[key].attrs[attr_name] = attr_processor(root, attr_val)
+
+
+def get_jit_methods(module):
+ """Get all jit-compiled methods in a module."""
+ res = {}
+ module_name = module.__name__
+ members = inspect.getmembers(module)
+ for member_name, obj in members:
+ if _is_jit_method(obj):
+ full_name = f"{module_name}.{member_name}"
+ res[full_name] = obj
+ return res
+
+
+def _is_jit_method(obj):
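+ # Numba-compiled functions expose the original, uncompiled Python
+ # function via their ``py_func`` attribute; its presence marks an
+ # object as jit-compiled.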
+ return hasattr(obj, "py_func")
diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py
index 0a15fd941f..c62ffcea1d 100644
--- a/satpy/tests/scene_tests/test_conversions.py
+++ b/satpy/tests/scene_tests/test_conversions.py
@@ -80,3 +80,87 @@ def test_geoviews_basic_with_swath(self):
gv_obj = scn.to_geoviews()
# we assume that if we got something back, geoviews can use it
assert gv_obj is not None
+
+
+class TestToXarrayConversion:
+ """Test Scene.to_xarray() conversion."""
+
+ def test_with_empty_scene(self):
+ """Test converting empty Scene to xarray."""
+ scn = Scene()
+ ds = scn.to_xarray()
+ assert isinstance(ds, xr.Dataset)
+ assert len(ds.variables) == 0
+ assert len(ds.coords) == 0
+
+ @pytest.fixture
+ def single_area_scn(self):
+ """Define Scene with single area."""
+ from pyresample.geometry import AreaDefinition
+
+ area = AreaDefinition('test', 'test', 'test',
+ {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0},
+ 2, 2, [-200, -200, 200, 200])
+ data_array = xr.DataArray(da.zeros((2, 2), chunks=-1),
+ dims=('y', 'x'),
+ attrs={'start_time': datetime(2018, 1, 1), 'area': area})
+ scn = Scene()
+ scn['var1'] = data_array
+ return scn
+
+ @pytest.fixture
+ def multi_area_scn(self):
+ """Define Scene with multiple area."""
+ from pyresample.geometry import AreaDefinition
+
+ area1 = AreaDefinition('test', 'test', 'test',
+ {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0},
+ 2, 2, [-200, -200, 200, 200])
+ area2 = AreaDefinition('test', 'test', 'test',
+ {'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0},
+ 4, 4, [-200, -200, 200, 200])
+
+ data_array1 = xr.DataArray(da.zeros((2, 2), chunks=-1),
+ dims=('y', 'x'),
+ attrs={'start_time': datetime(2018, 1, 1), 'area': area1})
+ data_array2 = xr.DataArray(da.zeros((4, 4), chunks=-1),
+ dims=('y', 'x'),
+ attrs={'start_time': datetime(2018, 1, 1), 'area': area2})
+ scn = Scene()
+ scn['var1'] = data_array1
+ scn['var2'] = data_array2
+ return scn
+
+ def test_with_single_area_scene_type(self, single_area_scn):
+ """Test converting single area Scene to xarray dataset."""
+ ds = single_area_scn.to_xarray()
+ assert isinstance(ds, xr.Dataset)
+ assert "var1" in ds.data_vars
+
+ def test_include_lonlats_true(self, single_area_scn):
+ """Test include lonlats."""
+ ds = single_area_scn.to_xarray(include_lonlats=True)
+ assert "latitude" in ds.coords
+ assert "longitude" in ds.coords
+
+ def test_include_lonlats_false(self, single_area_scn):
+ """Test exclude lonlats."""
+ ds = single_area_scn.to_xarray(include_lonlats=False)
+ assert "latitude" not in ds.coords
+ assert "longitude" not in ds.coords
+
+ def test_dataset_string_accepted(self, single_area_scn):
+ """Test accept dataset string."""
+ ds = single_area_scn.to_xarray(datasets="var1")
+ assert isinstance(ds, xr.Dataset)
+
+ def test_wrong_dataset_key(self, single_area_scn):
+ """Test raise error if unexisting dataset."""
+ with pytest.raises(KeyError):
+ _ = single_area_scn.to_xarray(datasets="var2")
+
+ def test_to_xarray_with_multiple_area_scene(self, multi_area_scn):
+ """Test converting muiltple area Scene to xarray."""
+ # TODO: in future adapt for DataTree implementation
+ with pytest.raises(ValueError):
+ _ = multi_area_scn.to_xarray()
diff --git a/satpy/tests/scene_tests/test_resampling.py b/satpy/tests/scene_tests/test_resampling.py
index 19268f9e19..39f9a50092 100644
--- a/satpy/tests/scene_tests/test_resampling.py
+++ b/satpy/tests/scene_tests/test_resampling.py
@@ -560,6 +560,17 @@ def test_aggregate(self):
expected_aggregated_shape = (y_size / 2, x_size / 2)
self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size)
+ def test_custom_aggregate(self):
+ """Test the aggregate method with custom function."""
+ x_size = 3712
+ y_size = 3712
+
+ scene1 = self._create_test_data(x_size, y_size)
+
+ scene2 = scene1.aggregate(func=np.sum, x=2, y=2)
+ expected_aggregated_shape = (y_size / 2, x_size / 2)
+ self._check_aggregation_results(expected_aggregated_shape, scene1, scene2, x_size, y_size)
+
@staticmethod
def _create_test_data(x_size, y_size):
from pyresample.geometry import AreaDefinition
diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py
index 31a08b8c08..2b0a5dfc6c 100644
--- a/satpy/tests/writer_tests/test_cf.py
+++ b/satpy/tests/writer_tests/test_cf.py
@@ -21,11 +21,9 @@
import logging
import os
import tempfile
-import unittest
import warnings
from collections import OrderedDict
from datetime import datetime
-from unittest import mock
import dask.array as da
import numpy as np
@@ -51,7 +49,7 @@
# - request
-class TempFile(object):
+class TempFile:
"""A temporary filename class."""
def __init__(self, suffix=".nc"):
@@ -70,7 +68,125 @@ def __exit__(self, *args):
os.remove(self.filename)
-class TestCFWriter(unittest.TestCase):
+def test_lonlat_storage(tmp_path):
+ """Test correct storage for area with lon/lat units."""
+ from ..utils import make_fake_scene
+ scn = make_fake_scene(
+ {"ketolysis": np.arange(25).reshape(5, 5)},
+ daskify=True,
+ area=create_area_def("mavas", 4326, shape=(5, 5),
+ center=(0, 0), resolution=(1, 1)))
+
+ filename = os.fspath(tmp_path / "test.nc")
+ scn.save_datasets(filename=filename, writer="cf", include_lonlats=False)
+ with xr.open_dataset(filename) as ds:
+ assert ds["ketolysis"].attrs["grid_mapping"] == "mavas"
+ assert ds["mavas"].attrs["grid_mapping_name"] == "latitude_longitude"
+ assert ds["x"].attrs["units"] == "degrees_east"
+ assert ds["y"].attrs["units"] == "degrees_north"
+ assert ds["mavas"].attrs["longitude_of_prime_meridian"] == 0.0
+ np.testing.assert_allclose(ds["mavas"].attrs["semi_major_axis"], 6378137.0)
+ np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563)
+
+
+def test_da2cf_lonlat():
+ """Test correct da2cf encoding for area with lon/lat units."""
+ from satpy.resample import add_crs_xy_coords
+ from satpy.writers.cf_writer import CFWriter
+
+ area = create_area_def("mavas", 4326, shape=(5, 5),
+ center=(0, 0), resolution=(1, 1))
+ da = xr.DataArray(
+ np.arange(25).reshape(5, 5),
+ dims=("y", "x"),
+ attrs={"area": area})
+ da = add_crs_xy_coords(da, area)
+ new_da = CFWriter.da2cf(da)
+ assert new_da["x"].attrs["units"] == "degrees_east"
+ assert new_da["y"].attrs["units"] == "degrees_north"
+
+
+def test_is_projected(caplog):
+ """Tests for private _is_projected function."""
+ from satpy.writers.cf.crs import _is_projected
+
+ # test case with units but no area
+ da = xr.DataArray(
+ np.arange(25).reshape(5, 5),
+ dims=("y", "x"),
+ coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}),
+ "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "m"})})
+ assert _is_projected(da)
+
+ da = xr.DataArray(
+ np.arange(25).reshape(5, 5),
+ dims=("y", "x"),
+ coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}),
+ "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})})
+ assert not _is_projected(da)
+
+ da = xr.DataArray(
+ np.arange(25).reshape(5, 5),
+ dims=("y", "x"))
+ with caplog.at_level(logging.WARNING):
+ assert _is_projected(da)
+ assert "Failed to tell if data are projected." in caplog.text
+
+
+def test_preprocess_dataarray_name():
+ """Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name."""
+ from satpy import Scene
+ from satpy.writers.cf_writer import _preprocess_dataarray_name
+
+ scn = Scene()
+ scn['1'] = xr.DataArray([1, 2, 3])
+ dataarray = scn['1']
+ # If numeric_name_prefix is a non-empty string, the original_name attribute should be added
+ out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="TEST", include_orig_name=True)
+ assert out_da.attrs['original_name'] == '1'
+
+ # If numeric_name_prefix is an empty string, False or None, original_name should not be added
+ out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix="", include_orig_name=True)
+ assert "original_name" not in out_da.attrs
+
+ out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=False, include_orig_name=True)
+ assert "original_name" not in out_da.attrs
+
+ out_da = _preprocess_dataarray_name(dataarray, numeric_name_prefix=None, include_orig_name=True)
+ assert "original_name" not in out_da.attrs
+
+
+def test_add_time_cf_attrs():
+ """Test addition of CF-compliant time attributes."""
+ from satpy import Scene
+ from satpy.writers.cf_writer import add_time_bounds_dimension
+
+ scn = Scene()
+ test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
+ times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01',
+ '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64)
+ scn['test-array'] = xr.DataArray(test_array,
+ dims=['y', 'x'],
+ coords={'time': ('y', times)},
+ attrs=dict(start_time=times[0], end_time=times[-1]))
+ ds = scn['test-array'].to_dataset(name='test-array')
+ ds = add_time_bounds_dimension(ds)
+ assert "bnds_1d" in ds.dims
+ assert ds.dims['bnds_1d'] == 2
+ assert "time_bnds" in list(ds.data_vars)
+ assert "bounds" in ds["time"].attrs
+ assert "standard_name" in ds["time"].attrs
+
+
+def test_empty_collect_cf_datasets():
+ """Test that if no DataArrays, collect_cf_datasets raise error."""
+ from satpy.writers.cf_writer import collect_cf_datasets
+
+ with pytest.raises(RuntimeError):
+ collect_cf_datasets(list_dataarrays=[])
+
+
+class TestCFWriter:
"""Test case for CF writer."""
def test_init(self):
@@ -94,8 +210,7 @@ def test_save_array(self):
with xr.open_dataset(filename) as f:
np.testing.assert_array_equal(f['test-array'][:], [1, 2, 3])
expected_prereq = ("DataQuery(name='hej')")
- self.assertEqual(f['test-array'].attrs['prerequisites'],
- expected_prereq)
+ assert f['test-array'].attrs['prerequisites'] == expected_prereq
def test_save_array_coords(self):
"""Test saving array with coordinates."""
@@ -123,12 +238,11 @@ def test_save_array_coords(self):
np.testing.assert_array_equal(f['test-array'][:], [[1, 2, 3]])
np.testing.assert_array_equal(f['x'][:], [0, 1, 2])
np.testing.assert_array_equal(f['y'][:], [0])
- self.assertNotIn('crs', f)
- self.assertNotIn('_FillValue', f['x'].attrs)
- self.assertNotIn('_FillValue', f['y'].attrs)
+ assert 'crs' not in f
+ assert '_FillValue' not in f['x'].attrs
+ assert '_FillValue' not in f['y'].attrs
expected_prereq = ("DataQuery(name='hej')")
- self.assertEqual(f['test-array'].attrs['prerequisites'],
- expected_prereq)
+ assert f['test-array'].attrs['prerequisites'] == expected_prereq
def test_save_dataset_a_digit(self):
"""Test saving an array to netcdf/cf where dataset name starting with a digit."""
@@ -156,7 +270,7 @@ def test_save_dataset_a_digit_prefix_include_attr(self):
scn.save_datasets(filename=filename, writer='cf', include_orig_name=True, numeric_name_prefix='TEST')
with xr.open_dataset(filename) as f:
np.testing.assert_array_equal(f['TEST1'][:], [1, 2, 3])
- self.assertEqual(f['TEST1'].attrs['original_name'], '1')
+ assert f['TEST1'].attrs['original_name'] == '1'
def test_save_dataset_a_digit_no_prefix_include_attr(self):
"""Test saving an array to netcdf/cf dataset name starting with a digit with no prefix include orig name."""
@@ -166,7 +280,7 @@ def test_save_dataset_a_digit_no_prefix_include_attr(self):
scn.save_datasets(filename=filename, writer='cf', include_orig_name=True, numeric_name_prefix='')
with xr.open_dataset(filename) as f:
np.testing.assert_array_equal(f['1'][:], [1, 2, 3])
- self.assertNotIn('original_name', f['1'].attrs)
+ assert 'original_name' not in f['1'].attrs
def test_ancillary_variables(self):
"""Test ancillary_variables cited each other."""
@@ -185,10 +299,8 @@ def test_ancillary_variables(self):
with TempFile() as filename:
scn.save_datasets(filename=filename, writer='cf')
with xr.open_dataset(filename) as f:
- self.assertEqual(f['test-array-1'].attrs['ancillary_variables'],
- 'test-array-2')
- self.assertEqual(f['test-array-2'].attrs['ancillary_variables'],
- 'test-array-1')
+ assert f['test-array-1'].attrs['ancillary_variables'] == 'test-array-2'
+ assert f['test-array-2'].attrs['ancillary_variables'] == 'test-array-1'
def test_groups(self):
"""Test creating a file with groups."""
@@ -225,14 +337,14 @@ def test_groups(self):
pretty=True)
nc_root = xr.open_dataset(filename)
- self.assertIn('history', nc_root.attrs)
- self.assertSetEqual(set(nc_root.variables.keys()), set())
+ assert 'history' in nc_root.attrs
+ assert set(nc_root.variables.keys()) == set()
nc_visir = xr.open_dataset(filename, group='visir')
nc_hrv = xr.open_dataset(filename, group='hrv')
- self.assertSetEqual(set(nc_visir.variables.keys()), {'VIS006', 'IR_108', 'y', 'x', 'VIS006_acq_time',
- 'IR_108_acq_time'})
- self.assertSetEqual(set(nc_hrv.variables.keys()), {'HRV', 'y', 'x', 'acq_time'})
+ assert set(nc_visir.variables.keys()) == {'VIS006', 'IR_108',
+ 'y', 'x', 'VIS006_acq_time', 'IR_108_acq_time'}
+ assert set(nc_hrv.variables.keys()) == {'HRV', 'y', 'x', 'acq_time'}
for tst, ref in zip([nc_visir['VIS006'], nc_visir['IR_108'], nc_hrv['HRV']],
[scn['VIS006'], scn['IR_108'], scn['HRV']]):
np.testing.assert_array_equal(tst.data, ref.data)
@@ -242,7 +354,8 @@ def test_groups(self):
# Different projection coordinates in one group are not supported
with TempFile() as filename:
- self.assertRaises(ValueError, scn.save_datasets, datasets=['VIS006', 'HRV'], filename=filename, writer='cf')
+ with pytest.raises(ValueError):
+ scn.save_datasets(datasets=['VIS006', 'HRV'], filename=filename, writer='cf')
def test_single_time_value(self):
"""Test setting a single time value."""
@@ -294,7 +407,7 @@ def test_bounds(self):
with xr.open_dataset(filename, decode_cf=True) as f:
bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]')
np.testing.assert_array_equal(f['time_bnds'], bounds_exp)
- self.assertEqual(f['time'].attrs['bounds'], 'time_bnds')
+ assert f['time'].attrs['bounds'] == 'time_bnds'
# Check raw time coordinates & bounds
with xr.open_dataset(filename, decode_cf=False) as f:
@@ -368,7 +481,7 @@ def test_unlimited_dims_kwarg(self):
with TempFile() as filename:
scn.save_datasets(filename=filename, writer='cf', unlimited_dims=['time'])
with xr.open_dataset(filename) as f:
- self.assertSetEqual(f.encoding['unlimited_dims'], {'time'})
+ assert set(f.encoding['unlimited_dims']) == {'time'}
def test_header_attrs(self):
"""Check global attributes are set."""
@@ -393,18 +506,18 @@ def test_header_attrs(self):
flatten_attrs=True,
writer='cf')
with xr.open_dataset(filename) as f:
- self.assertIn('history', f.attrs)
- self.assertEqual(f.attrs['sensor'], 'SEVIRI')
- self.assertEqual(f.attrs['orbit'], 99999)
+ assert 'history' in f.attrs
+ assert f.attrs['sensor'] == 'SEVIRI'
+ assert f.attrs['orbit'] == 99999
np.testing.assert_array_equal(f.attrs['list'], [1, 2, 3])
- self.assertEqual(f.attrs['set'], '{1, 2, 3}')
- self.assertEqual(f.attrs['dict_a'], 1)
- self.assertEqual(f.attrs['dict_b'], 2)
- self.assertEqual(f.attrs['nested_outer_inner1'], 1)
- self.assertEqual(f.attrs['nested_outer_inner2'], 2)
- self.assertEqual(f.attrs['bool'], 'true')
- self.assertEqual(f.attrs['bool_'], 'true')
- self.assertTrue('none' not in f.attrs.keys())
+ assert f.attrs['set'] == '{1, 2, 3}'
+ assert f.attrs['dict_a'] == 1
+ assert f.attrs['dict_b'] == 2
+ assert f.attrs['nested_outer_inner1'] == 1
+ assert f.attrs['nested_outer_inner2'] == 2
+ assert f.attrs['bool'] == 'true'
+ assert f.attrs['bool_'] == 'true'
+ assert 'none' not in f.attrs.keys()
def get_test_attrs(self):
"""Create some dataset attributes for testing purpose.
@@ -435,9 +548,9 @@ def get_test_attrs(self):
'dict': {'a': 1, 'b': 2},
'nested_dict': {'l1': {'l2': {'l3': np.array([1, 2, 3], dtype='uint8')}}},
'raw_metadata': OrderedDict([
- ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])),
- ('flag', np.bool_(True)),
- ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))]))
+ ('recarray', np.zeros(3, dtype=[('x', 'i4'), ('y', 'u1')])),
+ ('flag', np.bool_(True)),
+ ('dict', OrderedDict([('a', 1), ('b', np.array([1, 2, 3], dtype='uint8'))]))
])}
encoded = {'name': 'IR_108',
'start_time': '2018-01-01 00:00:00',
@@ -490,17 +603,17 @@ def get_test_attrs(self):
def assertDictWithArraysEqual(self, d1, d2):
"""Check that dicts containing arrays are equal."""
- self.assertSetEqual(set(d1.keys()), set(d2.keys()))
+ assert set(d1.keys()) == set(d2.keys())
for key, val1 in d1.items():
val2 = d2[key]
if isinstance(val1, np.ndarray):
np.testing.assert_array_equal(val1, val2)
- self.assertEqual(val1.dtype, val2.dtype)
+ assert val1.dtype == val2.dtype
else:
- self.assertEqual(val1, val2)
+ assert val1 == val2
if isinstance(val1, (np.floating, np.integer, np.bool_)):
- self.assertTrue(isinstance(val2, np.generic))
- self.assertEqual(val1.dtype, val2.dtype)
+ assert isinstance(val2, np.generic)
+ assert val1.dtype == val2.dtype
def test_encode_attrs_nc(self):
"""Test attributes encoding."""
@@ -516,10 +629,10 @@ def test_encode_attrs_nc(self):
raw_md_roundtrip = {'recarray': [[0, 0], [0, 0], [0, 0]],
'flag': 'true',
'dict': {'a': 1, 'b': [1, 2, 3]}}
- self.assertDictEqual(json.loads(encoded['raw_metadata']), raw_md_roundtrip)
- self.assertListEqual(json.loads(encoded['array_3d']), [[[1, 2], [3, 4]], [[1, 2], [3, 4]]])
- self.assertDictEqual(json.loads(encoded['nested_dict']), {"l1": {"l2": {"l3": [1, 2, 3]}}})
- self.assertListEqual(json.loads(encoded['nested_list']), ["1", ["2", [3]]])
+ assert json.loads(encoded['raw_metadata']) == raw_md_roundtrip
+ assert json.loads(encoded['array_3d']) == [[[1, 2], [3, 4]], [[1, 2], [3, 4]]]
+ assert json.loads(encoded['nested_dict']) == {"l1": {"l2": {"l3": [1, 2, 3]}}}
+ assert json.loads(encoded['nested_list']) == ["1", ["2", [3]]]
def test_da2cf(self):
"""Test the conversion of a DataArray to a CF-compatible DataArray."""
@@ -550,8 +663,8 @@ def test_da2cf(self):
np.testing.assert_array_equal(res['x'], arr['x'])
np.testing.assert_array_equal(res['y'], arr['y'])
np.testing.assert_array_equal(res['acq_time'], arr['acq_time'])
- self.assertDictEqual(res['x'].attrs, {'units': 'm', 'standard_name': 'projection_x_coordinate'})
- self.assertDictEqual(res['y'].attrs, {'units': 'm', 'standard_name': 'projection_y_coordinate'})
+ assert res['x'].attrs == {'units': 'm', 'standard_name': 'projection_x_coordinate'}
+ assert res['y'].attrs == {'units': 'm', 'standard_name': 'projection_y_coordinate'}
self.assertDictWithArraysEqual(res.attrs, attrs_expected)
# Test attribute kwargs
@@ -567,10 +680,9 @@ def test_da2cf_one_dimensional_array(self):
coords={'y': [0, 1, 2, 3], 'acq_time': ('y', [0, 1, 2, 3])})
_ = CFWriter.da2cf(arr)
- @mock.patch('satpy.writers.cf_writer.CFWriter.__init__', return_value=None)
- def test_collect_datasets(self, *mocks):
+ def test_collect_cf_dataarrays(self):
"""Test collecting CF datasets from a DataArray objects."""
- from satpy.writers.cf_writer import CFWriter
+ from satpy.writers.cf_writer import _collect_cf_dataset
geos = pyresample.geometry.AreaDefinition(
area_id='geos',
@@ -587,30 +699,26 @@ def test_collect_datasets(self, *mocks):
time = [1, 2]
tstart = datetime(2019, 4, 1, 12, 0)
tend = datetime(2019, 4, 1, 12, 15)
- datasets = [xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)},
- attrs={'name': 'var1', 'start_time': tstart, 'end_time': tend, 'area': geos}),
- xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)},
- attrs={'name': 'var2', 'long_name': 'variable 2'})]
+ list_dataarrays = [xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)},
+ attrs={'name': 'var1', 'start_time': tstart, 'end_time': tend, 'area': geos}),
+ xr.DataArray(data=data, dims=('y', 'x'), coords={'y': y, 'x': x, 'acq_time': ('y', time)},
+ attrs={'name': 'var2', 'long_name': 'variable 2'})]
# Collect datasets
- writer = CFWriter()
- datas, start_times, end_times = writer._collect_datasets(datasets, include_lonlats=True)
+ ds = _collect_cf_dataset(list_dataarrays, include_lonlats=True)
# Test results
- self.assertEqual(len(datas), 3)
- self.assertEqual(set(datas.keys()), {'var1', 'var2', 'geos'})
- self.assertListEqual(start_times, [None, tstart, None])
- self.assertListEqual(end_times, [None, tend, None])
- var1 = datas['var1']
- var2 = datas['var2']
- self.assertEqual(var1.name, 'var1')
- self.assertEqual(var1.attrs['grid_mapping'], 'geos')
- self.assertEqual(var1.attrs['start_time'], '2019-04-01 12:00:00')
- self.assertEqual(var1.attrs['end_time'], '2019-04-01 12:15:00')
- self.assertEqual(var1.attrs['long_name'], 'var1')
+ assert len(ds.keys()) == 3
+ assert set(ds.keys()) == {'var1', 'var2', 'geos'}
+
+ da_var1 = ds['var1']
+ da_var2 = ds['var2']
+ assert da_var1.name == 'var1'
+ assert da_var1.attrs['grid_mapping'] == 'geos'
+ assert da_var1.attrs['long_name'] == 'var1'
# variable 2
- self.assertNotIn('grid_mapping', var2.attrs)
- self.assertEqual(var2.attrs['long_name'], 'variable 2')
+ assert 'grid_mapping' not in da_var2.attrs
+ assert da_var2.attrs['long_name'] == 'variable 2'
def test_assert_xy_unique(self):
"""Test that the x and y coordinates are unique."""
@@ -623,7 +731,8 @@ def test_assert_xy_unique(self):
assert_xy_unique(datas)
datas['c'] = xr.DataArray(data=dummy, dims=('y', 'x'), coords={'y': [1, 3], 'x': [3, 4]})
- self.assertRaises(ValueError, assert_xy_unique, datas)
+ with pytest.raises(ValueError):
+ assert_xy_unique(datas)
def test_link_coords(self):
"""Check that coordinates link has been established correctly."""
@@ -646,19 +755,19 @@ def test_link_coords(self):
link_coords(datasets)
# Check that link has been established correctly and 'coordinates' attribute has been dropped
- self.assertIn('lon', datasets['var1'].coords)
- self.assertIn('lat', datasets['var1'].coords)
+ assert 'lon' in datasets['var1'].coords
+ assert 'lat' in datasets['var1'].coords
np.testing.assert_array_equal(datasets['var1']['lon'].data, lon)
np.testing.assert_array_equal(datasets['var1']['lat'].data, lat)
- self.assertNotIn('coordinates', datasets['var1'].attrs)
+ assert 'coordinates' not in datasets['var1'].attrs
# There should be no link if there was no 'coordinate' attribute
- self.assertNotIn('lon', datasets['var2'].coords)
- self.assertNotIn('lat', datasets['var2'].coords)
+ assert 'lon' not in datasets['var2'].coords
+ assert 'lat' not in datasets['var2'].coords
- # The non-existant dimension or coordinate should be dropped
- self.assertNotIn('time', datasets['var3'].coords)
- self.assertNotIn('not_exist', datasets['var4'].coords)
+ # The non-existent dimension or coordinate should be dropped
+ assert 'time' not in datasets['var3'].coords
+ assert 'not_exist' not in datasets['var4'].coords
def test_make_alt_coords_unique(self):
"""Test that created coordinate variables are unique."""
@@ -680,8 +789,8 @@ def test_make_alt_coords_unique(self):
res = make_alt_coords_unique(datasets)
np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1)
np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2)
- self.assertNotIn('acq_time', res['var1'].coords)
- self.assertNotIn('acq_time', res['var2'].coords)
+ assert 'acq_time' not in res['var1'].coords
+ assert 'acq_time' not in res['var2'].coords
# Make sure nothing else is modified
np.testing.assert_array_equal(res['var1']['x'], x)
@@ -690,21 +799,20 @@ def test_make_alt_coords_unique(self):
np.testing.assert_array_equal(res['var2']['y'], y)
# Coords not unique -> Dataset names must be prepended, even if pretty=True
- with mock.patch('satpy.writers.cf_writer.warnings.warn') as warn:
+ with pytest.warns(UserWarning, match='Cannot pretty-format "acq_time"'):
res = make_alt_coords_unique(datasets, pretty=True)
- warn.assert_called()
- np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1)
- np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2)
- self.assertNotIn('acq_time', res['var1'].coords)
- self.assertNotIn('acq_time', res['var2'].coords)
+ np.testing.assert_array_equal(res['var1']['var1_acq_time'], time1)
+ np.testing.assert_array_equal(res['var2']['var2_acq_time'], time2)
+ assert 'acq_time' not in res['var1'].coords
+ assert 'acq_time' not in res['var2'].coords
# Coords unique and pretty=True -> Don't modify coordinate names
datasets['var2']['acq_time'] = ('y', time1)
res = make_alt_coords_unique(datasets, pretty=True)
np.testing.assert_array_equal(res['var1']['acq_time'], time1)
np.testing.assert_array_equal(res['var2']['acq_time'], time1)
- self.assertNotIn('var1_acq_time', res['var1'].coords)
- self.assertNotIn('var2_acq_time', res['var2'].coords)
+ assert 'var1_acq_time' not in res['var1'].coords
+ assert 'var2_acq_time' not in res['var2'].coords
def test_area2cf(self):
"""Test the conversion of an area to CF standards."""
@@ -724,44 +832,44 @@ def test_area2cf(self):
ds = ds_base.copy(deep=True)
ds.attrs['area'] = geos
- res = area2cf(ds)
- self.assertEqual(len(res), 2)
- self.assertEqual(res[0].size, 1) # grid mapping variable
- self.assertEqual(res[0].name, res[1].attrs['grid_mapping'])
+ res = area2cf(ds, include_lonlats=False)
+ assert len(res) == 2
+ assert res[0].size == 1 # grid mapping variable
+ assert res[0].name == res[1].attrs['grid_mapping']
- # b) Area Definition and strict=False
+ # b) Area Definition and include_lonlats=True
ds = ds_base.copy(deep=True)
ds.attrs['area'] = geos
- res = area2cf(ds, strict=True)
+ res = area2cf(ds, include_lonlats=True)
# same as above
- self.assertEqual(len(res), 2)
- self.assertEqual(res[0].size, 1) # grid mapping variable
- self.assertEqual(res[0].name, res[1].attrs['grid_mapping'])
+ assert len(res) == 2
+ assert res[0].size == 1 # grid mapping variable
+ assert res[0].name == res[1].attrs['grid_mapping']
# but now also have the lon/lats
- self.assertIn('longitude', res[1].coords)
- self.assertIn('latitude', res[1].coords)
+ assert 'longitude' in res[1].coords
+ assert 'latitude' in res[1].coords
# c) Swath Definition
swath = pyresample.geometry.SwathDefinition(lons=[[1, 1], [2, 2]], lats=[[1, 2], [1, 2]])
ds = ds_base.copy(deep=True)
ds.attrs['area'] = swath
- res = area2cf(ds)
- self.assertEqual(len(res), 1)
- self.assertIn('longitude', res[0].coords)
- self.assertIn('latitude', res[0].coords)
- self.assertNotIn('grid_mapping', res[0].attrs)
+ res = area2cf(ds, include_lonlats=False)
+ assert len(res) == 1
+ assert 'longitude' in res[0].coords
+ assert 'latitude' in res[0].coords
+ assert 'grid_mapping' not in res[0].attrs
- def test_area2gridmapping(self):
+ def test__add_grid_mapping(self):
"""Test the conversion from pyresample area object to CF grid mapping."""
- from satpy.writers.cf_writer import area2gridmapping
+ from satpy.writers.cf_writer import _add_grid_mapping
def _gm_matches(gmapping, expected):
"""Assert that all keys in ``expected`` match the values in ``gmapping``."""
for attr_key, attr_val in expected.attrs.items():
test_val = gmapping.attrs[attr_key]
if attr_val is None or isinstance(attr_val, str):
- self.assertEqual(test_val, attr_val)
+ assert test_val == attr_val
else:
np.testing.assert_almost_equal(test_val, attr_val, decimal=3)
@@ -792,15 +900,15 @@ def _gm_matches(gmapping, expected):
ds = ds_base.copy()
ds.attrs['area'] = geos
- new_ds, grid_mapping = area2gridmapping(ds)
+ new_ds, grid_mapping = _add_grid_mapping(ds)
if 'sweep_angle_axis' in grid_mapping.attrs:
# older versions of pyproj might not include this
- self.assertEqual(grid_mapping.attrs['sweep_angle_axis'], 'y')
+ assert grid_mapping.attrs['sweep_angle_axis'] == 'y'
- self.assertEqual(new_ds.attrs['grid_mapping'], 'geos')
+ assert new_ds.attrs['grid_mapping'] == 'geos'
_gm_matches(grid_mapping, geos_expected)
# should not have been modified
- self.assertNotIn('grid_mapping', ds.attrs)
+ assert 'grid_mapping' not in ds.attrs
# b) Projection does not have a corresponding CF representation (COSMO)
cosmo7 = pyresample.geometry.AreaDefinition(
@@ -816,15 +924,15 @@ def _gm_matches(gmapping, expected):
ds = ds_base.copy()
ds.attrs['area'] = cosmo7
- new_ds, grid_mapping = area2gridmapping(ds)
- self.assertIn('crs_wkt', grid_mapping.attrs)
+ new_ds, grid_mapping = _add_grid_mapping(ds)
+ assert 'crs_wkt' in grid_mapping.attrs
wkt = grid_mapping.attrs['crs_wkt']
- self.assertIn('ELLIPSOID["WGS 84"', wkt)
- self.assertIn('PARAMETER["lat_0",46', wkt)
- self.assertIn('PARAMETER["lon_0",4.535', wkt)
- self.assertIn('PARAMETER["o_lat_p",90', wkt)
- self.assertIn('PARAMETER["o_lon_p",-5.465', wkt)
- self.assertEqual(new_ds.attrs['grid_mapping'], 'cosmo7')
+ assert 'ELLIPSOID["WGS 84"' in wkt
+ assert 'PARAMETER["lat_0",46' in wkt
+ assert 'PARAMETER["lon_0",4.535' in wkt
+ assert 'PARAMETER["o_lat_p",90' in wkt
+ assert 'PARAMETER["o_lon_p",-5.465' in wkt
+ assert new_ds.attrs['grid_mapping'] == 'cosmo7'
# c) Projection Transverse Mercator
lat_0 = 36.5
@@ -849,8 +957,8 @@ def _gm_matches(gmapping, expected):
ds = ds_base.copy()
ds.attrs['area'] = tmerc
- new_ds, grid_mapping = area2gridmapping(ds)
- self.assertEqual(new_ds.attrs['grid_mapping'], 'tmerc')
+ new_ds, grid_mapping = _add_grid_mapping(ds)
+ assert new_ds.attrs['grid_mapping'] == 'tmerc'
_gm_matches(grid_mapping, tmerc_expected)
# d) Projection that has a representation but no explicit a/b
@@ -875,9 +983,9 @@ def _gm_matches(gmapping, expected):
ds = ds_base.copy()
ds.attrs['area'] = geos
- new_ds, grid_mapping = area2gridmapping(ds)
+ new_ds, grid_mapping = _add_grid_mapping(ds)
- self.assertEqual(new_ds.attrs['grid_mapping'], 'geos')
+ assert new_ds.attrs['grid_mapping'] == 'geos'
_gm_matches(grid_mapping, geos_expected)
# e) oblique Mercator
@@ -906,9 +1014,9 @@ def _gm_matches(gmapping, expected):
ds = ds_base.copy()
ds.attrs['area'] = area
- new_ds, grid_mapping = area2gridmapping(ds)
+ new_ds, grid_mapping = _add_grid_mapping(ds)
- self.assertEqual(new_ds.attrs['grid_mapping'], 'omerc_otf')
+ assert new_ds.attrs['grid_mapping'] == 'omerc_otf'
_gm_matches(grid_mapping, omerc_expected)
# f) Projection that has a representation but no explicit a/b
@@ -931,14 +1039,14 @@ def _gm_matches(gmapping, expected):
ds = ds_base.copy()
ds.attrs['area'] = geos
- new_ds, grid_mapping = area2gridmapping(ds)
+ new_ds, grid_mapping = _add_grid_mapping(ds)
- self.assertEqual(new_ds.attrs['grid_mapping'], 'geos')
+ assert new_ds.attrs['grid_mapping'] == 'geos'
_gm_matches(grid_mapping, geos_expected)
- def test_area2lonlat(self):
+ def test_add_lonlat_coords(self):
"""Test the conversion from areas to lon/lat."""
- from satpy.writers.cf_writer import area2lonlat
+ from satpy.writers.cf_writer import add_lonlat_coords
area = pyresample.geometry.AreaDefinition(
'seviri',
@@ -951,11 +1059,11 @@ def test_area2lonlat(self):
lons_ref, lats_ref = area.get_lonlats()
dataarray = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), attrs={'area': area})
- res = area2lonlat(dataarray)
+ res = add_lonlat_coords(dataarray)
# original should be unmodified
- self.assertNotIn('longitude', dataarray.coords)
- self.assertEqual(set(res.coords), {'longitude', 'latitude'})
+ assert 'longitude' not in dataarray.coords
+ assert set(res.coords) == {'longitude', 'latitude'}
lat = res['latitude']
lon = res['longitude']
np.testing.assert_array_equal(lat.data, lats_ref)
@@ -972,13 +1080,13 @@ def test_area2lonlat(self):
[-5570248.686685662, -5567248.28340708, 5567248.28340708, 5570248.686685662]
)
lons_ref, lats_ref = area.get_lonlats()
- dataarray = xr.DataArray(data=da.from_array(np.arange(3*10*10).reshape(3, 10, 10), chunks=(1, 5, 5)),
+ dataarray = xr.DataArray(data=da.from_array(np.arange(3 * 10 * 10).reshape(3, 10, 10), chunks=(1, 5, 5)),
dims=('bands', 'y', 'x'), attrs={'area': area})
- res = area2lonlat(dataarray)
+ res = add_lonlat_coords(dataarray)
# original should be unmodified
- self.assertNotIn('longitude', dataarray.coords)
- self.assertEqual(set(res.coords), {'longitude', 'latitude'})
+ assert 'longitude' not in dataarray.coords
+ assert set(res.coords) == {'longitude', 'latitude'}
lat = res['latitude']
lon = res['longitude']
np.testing.assert_array_equal(lat.data, lats_ref)
@@ -1014,8 +1122,8 @@ def test_global_attr_default_history_and_Conventions(self):
with TempFile() as filename:
scn.save_datasets(filename=filename, writer='cf')
with xr.open_dataset(filename) as f:
- self.assertEqual(f.attrs['Conventions'], 'CF-1.7')
- self.assertIn('Created by pytroll/satpy on', f.attrs['history'])
+ assert f.attrs['Conventions'] == 'CF-1.7'
+ assert 'Created by pytroll/satpy on' in f.attrs['history']
def test_global_attr_history_and_Conventions(self):
"""Test saving global attributes history and Conventions."""
@@ -1033,81 +1141,17 @@ def test_global_attr_history_and_Conventions(self):
with TempFile() as filename:
scn.save_datasets(filename=filename, writer='cf', header_attrs=header_attrs)
with xr.open_dataset(filename) as f:
- self.assertEqual(f.attrs['Conventions'], 'CF-1.7, ACDD-1.3')
- self.assertIn('TEST add history\n', f.attrs['history'])
- self.assertIn('Created by pytroll/satpy on', f.attrs['history'])
+ assert f.attrs['Conventions'] == 'CF-1.7, ACDD-1.3'
+ assert 'TEST add history\n' in f.attrs['history']
+ assert 'Created by pytroll/satpy on' in f.attrs['history']
-def test_lonlat_storage(tmp_path):
- """Test correct storage for area with lon/lat units."""
- from ..utils import make_fake_scene
- scn = make_fake_scene(
- {"ketolysis": np.arange(25).reshape(5, 5)},
- daskify=True,
- area=create_area_def("mavas", 4326, shape=(5, 5),
- center=(0, 0), resolution=(1, 1)))
-
- filename = os.fspath(tmp_path / "test.nc")
- scn.save_datasets(filename=filename, writer="cf", include_lonlats=False)
- with xr.open_dataset(filename) as ds:
- assert ds["ketolysis"].attrs["grid_mapping"] == "mavas"
- assert ds["mavas"].attrs["grid_mapping_name"] == "latitude_longitude"
- assert ds["x"].attrs["units"] == "degrees_east"
- assert ds["y"].attrs["units"] == "degrees_north"
- assert ds["mavas"].attrs["longitude_of_prime_meridian"] == 0.0
- np.testing.assert_allclose(ds["mavas"].attrs["semi_major_axis"], 6378137.0)
- np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563)
-
-
-def test_da2cf_lonlat():
- """Test correct da2cf encoding for area with lon/lat units."""
- from satpy.resample import add_crs_xy_coords
- from satpy.writers.cf_writer import CFWriter
-
- area = create_area_def("mavas", 4326, shape=(5, 5),
- center=(0, 0), resolution=(1, 1))
- da = xr.DataArray(
- np.arange(25).reshape(5, 5),
- dims=("y", "x"),
- attrs={"area": area})
- da = add_crs_xy_coords(da, area)
- new_da = CFWriter.da2cf(da)
- assert new_da["x"].attrs["units"] == "degrees_east"
- assert new_da["y"].attrs["units"] == "degrees_north"
-
-
-def test_is_projected(caplog):
- """Tests for private _is_projected function."""
- from satpy.writers.cf_writer import CFWriter
-
- # test case with units but no area
- da = xr.DataArray(
- np.arange(25).reshape(5, 5),
- dims=("y", "x"),
- coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}),
- "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "m"})})
- assert CFWriter._is_projected(da)
-
- da = xr.DataArray(
- np.arange(25).reshape(5, 5),
- dims=("y", "x"),
- coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}),
- "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})})
- assert not CFWriter._is_projected(da)
-
- da = xr.DataArray(
- np.arange(25).reshape(5, 5),
- dims=("y", "x"))
- with caplog.at_level(logging.WARNING):
- assert CFWriter._is_projected(da)
- assert "Failed to tell if data are projected." in caplog.text
-
-
-class TestCFWriterData(unittest.TestCase):
+class TestCFWriterData:
"""Test case for CF writer where data arrays are needed."""
- def setUp(self):
- """Create some test data."""
+ @pytest.fixture
+ def datasets(self):
+ """Create test dataset."""
data = [[75, 2], [3, 4]]
y = [1, 2]
x = [1, 2]
@@ -1118,146 +1162,166 @@ def setUp(self):
projection={'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8},
width=2, height=2,
area_extent=[-1, -1, 1, 1])
- self.datasets = {'var1': xr.DataArray(data=data,
- dims=('y', 'x'),
- coords={'y': y, 'x': x}),
- 'var2': xr.DataArray(data=data,
- dims=('y', 'x'),
- coords={'y': y, 'x': x}),
- 'lat': xr.DataArray(data=data,
- dims=('y', 'x'),
- coords={'y': y, 'x': x}),
- 'lon': xr.DataArray(data=data,
- dims=('y', 'x'),
- coords={'y': y, 'x': x})}
- self.datasets['lat'].attrs['standard_name'] = 'latitude'
- self.datasets['var1'].attrs['standard_name'] = 'dummy'
- self.datasets['var2'].attrs['standard_name'] = 'dummy'
- self.datasets['var2'].attrs['area'] = geos
- self.datasets['var1'].attrs['area'] = geos
- self.datasets['lat'].attrs['name'] = 'lat'
- self.datasets['var1'].attrs['name'] = 'var1'
- self.datasets['var2'].attrs['name'] = 'var2'
- self.datasets['lon'].attrs['name'] = 'lon'
-
- def test_dataset_is_projection_coords(self):
- """Test the dataset_is_projection_coords function."""
- from satpy.writers.cf_writer import dataset_is_projection_coords
- self.assertTrue(dataset_is_projection_coords(self.datasets['lat']))
- self.assertFalse(dataset_is_projection_coords(self.datasets['var1']))
-
- def test_has_projection_coords(self):
+ datasets = {
+ 'var1': xr.DataArray(data=data,
+ dims=('y', 'x'),
+ coords={'y': y, 'x': x}),
+ 'var2': xr.DataArray(data=data,
+ dims=('y', 'x'),
+ coords={'y': y, 'x': x}),
+ 'lat': xr.DataArray(data=data,
+ dims=('y', 'x'),
+ coords={'y': y, 'x': x}),
+ 'lon': xr.DataArray(data=data,
+ dims=('y', 'x'),
+ coords={'y': y, 'x': x})}
+ datasets['lat'].attrs['standard_name'] = 'latitude'
+ datasets['var1'].attrs['standard_name'] = 'dummy'
+ datasets['var2'].attrs['standard_name'] = 'dummy'
+ datasets['var2'].attrs['area'] = geos
+ datasets['var1'].attrs['area'] = geos
+ datasets['lat'].attrs['name'] = 'lat'
+ datasets['var1'].attrs['name'] = 'var1'
+ datasets['var2'].attrs['name'] = 'var2'
+ datasets['lon'].attrs['name'] = 'lon'
+ return datasets
+
+ def test_is_lon_or_lat_dataarray(self, datasets):
+ """Test the is_lon_or_lat_dataarray function."""
+ from satpy.writers.cf_writer import is_lon_or_lat_dataarray
+
+ assert is_lon_or_lat_dataarray(datasets['lat'])
+ assert not is_lon_or_lat_dataarray(datasets['var1'])
+
+ def test_has_projection_coords(self, datasets):
"""Test the has_projection_coords function."""
from satpy.writers.cf_writer import has_projection_coords
- self.assertTrue(has_projection_coords(self.datasets))
- self.datasets['lat'].attrs['standard_name'] = 'dummy'
- self.assertFalse(has_projection_coords(self.datasets))
- @mock.patch('satpy.writers.cf_writer.CFWriter.__init__', return_value=None)
- def test_collect_datasets_with_latitude_named_lat(self, *mocks):
- """Test collecting CF datasets with latitude named lat."""
- from operator import getitem
+ assert has_projection_coords(datasets)
+ datasets['lat'].attrs['standard_name'] = 'dummy'
+ assert not has_projection_coords(datasets)
- from satpy.writers.cf_writer import CFWriter
+ def test_collect_cf_dataarrays_with_latitude_named_lat(self, datasets):
+ """Test collecting CF datasets with latitude named lat."""
+ from satpy.writers.cf_writer import _collect_cf_dataset
- self.datasets_list = [self.datasets[key] for key in self.datasets]
- self.datasets_list_no_latlon = [self.datasets[key] for key in ['var1', 'var2']]
+ datasets_list = [datasets[key] for key in datasets.keys()]
+ datasets_list_no_latlon = [datasets[key] for key in ['var1', 'var2']]
# Collect datasets
- writer = CFWriter()
- datas, start_times, end_times = writer._collect_datasets(self.datasets_list, include_lonlats=True)
- datas2, start_times, end_times = writer._collect_datasets(self.datasets_list_no_latlon, include_lonlats=True)
- # Test results
+ ds = _collect_cf_dataset(datasets_list, include_lonlats=True)
+ ds2 = _collect_cf_dataset(datasets_list_no_latlon, include_lonlats=True)
- self.assertEqual(len(datas), 5)
- self.assertEqual(set(datas.keys()), {'var1', 'var2', 'lon', 'lat', 'geos'})
- self.assertRaises(KeyError, getitem, datas['var1'], 'latitude')
- self.assertRaises(KeyError, getitem, datas['var1'], 'longitude')
- self.assertEqual(datas2['var1']['latitude'].attrs['name'], 'latitude')
- self.assertEqual(datas2['var1']['longitude'].attrs['name'], 'longitude')
+ # Test results
+ assert len(ds.keys()) == 5
+ assert set(ds.keys()) == {'var1', 'var2', 'lon', 'lat', 'geos'}
+ with pytest.raises(KeyError):
+ ds['var1'].attrs["latitude"]
+ with pytest.raises(KeyError):
+ ds['var1'].attrs["longitude"]
+ assert ds2['var1']['latitude'].attrs['name'] == 'latitude'
+ assert ds2['var1']['longitude'].attrs['name'] == 'longitude'
-class EncodingUpdateTest(unittest.TestCase):
+class TestEncodingUpdate:
"""Test update of netCDF encoding."""
- def setUp(self):
+ @pytest.fixture
+ def fake_ds(self):
+ """Create fake data for testing."""
+ ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]]),
+ 'bar': (('y', 'x'), [[3, 4], [5, 6]])},
+ coords={'y': [1, 2],
+ 'x': [3, 4],
+ 'lon': (('y', 'x'), [[7, 8], [9, 10]])})
+ return ds
+
+ @pytest.fixture
+ def fake_ds_digit(self):
"""Create fake data for testing."""
- self.ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]]),
- 'bar': (('y', 'x'), [[3, 4], [5, 6]])},
- coords={'y': [1, 2],
- 'x': [3, 4],
- 'lon': (('y', 'x'), [[7, 8], [9, 10]])})
- self.ds_digit = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1, 2], [3, 4]]),
- 'CHANNEL_2': (('y', 'x'), [[3, 4], [5, 6]])},
- coords={'y': [1, 2],
- 'x': [3, 4],
- 'lon': (('y', 'x'), [[7, 8], [9, 10]])})
-
- def test_dataset_name_digit(self):
+ ds_digit = xr.Dataset({'CHANNEL_1': (('y', 'x'), [[1, 2], [3, 4]]),
+ 'CHANNEL_2': (('y', 'x'), [[3, 4], [5, 6]])},
+ coords={'y': [1, 2],
+ 'x': [3, 4],
+ 'lon': (('y', 'x'), [[7, 8], [9, 10]])})
+ return ds_digit
+
+ def test_dataset_name_digit(self, fake_ds_digit):
"""Test data with dataset name staring with a digit."""
from satpy.writers.cf_writer import update_encoding
# Dataset with name starting with a digit
- ds = self.ds_digit
+ ds_digit = fake_ds_digit
kwargs = {'encoding': {'1': {'dtype': 'float32'},
'2': {'dtype': 'float32'}},
'other': 'kwargs'}
- enc, other_kwargs = update_encoding(ds, kwargs, numeric_name_prefix='CHANNEL_')
- self.assertDictEqual(enc, {'y': {'_FillValue': None},
- 'x': {'_FillValue': None},
- 'CHANNEL_1': {'dtype': 'float32'},
- 'CHANNEL_2': {'dtype': 'float32'}})
- self.assertDictEqual(other_kwargs, {'other': 'kwargs'})
-
- def test_without_time(self):
+ enc, other_kwargs = update_encoding(ds_digit, kwargs, numeric_name_prefix='CHANNEL_')
+ expected_dict = {
+ 'y': {'_FillValue': None},
+ 'x': {'_FillValue': None},
+ 'CHANNEL_1': {'dtype': 'float32'},
+ 'CHANNEL_2': {'dtype': 'float32'}
+ }
+ assert enc == expected_dict
+ assert other_kwargs == {'other': 'kwargs'}
+
+ def test_without_time(self, fake_ds):
"""Test data with no time dimension."""
from satpy.writers.cf_writer import update_encoding
# Without time dimension
- ds = self.ds.chunk(2)
+ ds = fake_ds.chunk(2)
kwargs = {'encoding': {'bar': {'chunksizes': (1, 1)}},
'other': 'kwargs'}
enc, other_kwargs = update_encoding(ds, kwargs)
- self.assertDictEqual(enc, {'y': {'_FillValue': None},
- 'x': {'_FillValue': None},
- 'lon': {'chunksizes': (2, 2)},
- 'foo': {'chunksizes': (2, 2)},
- 'bar': {'chunksizes': (1, 1)}})
- self.assertDictEqual(other_kwargs, {'other': 'kwargs'})
+ expected_dict = {
+ 'y': {'_FillValue': None},
+ 'x': {'_FillValue': None},
+ 'lon': {'chunksizes': (2, 2)},
+ 'foo': {'chunksizes': (2, 2)},
+ 'bar': {'chunksizes': (1, 1)}
+ }
+ assert enc == expected_dict
+ assert other_kwargs == {'other': 'kwargs'}
# Chunksize may not exceed shape
- ds = self.ds.chunk(8)
+ ds = fake_ds.chunk(8)
kwargs = {'encoding': {}, 'other': 'kwargs'}
enc, other_kwargs = update_encoding(ds, kwargs)
- self.assertDictEqual(enc, {'y': {'_FillValue': None},
- 'x': {'_FillValue': None},
- 'lon': {'chunksizes': (2, 2)},
- 'foo': {'chunksizes': (2, 2)},
- 'bar': {'chunksizes': (2, 2)}})
+ expected_dict = {
+ 'y': {'_FillValue': None},
+ 'x': {'_FillValue': None},
+ 'lon': {'chunksizes': (2, 2)},
+ 'foo': {'chunksizes': (2, 2)},
+ 'bar': {'chunksizes': (2, 2)}
+ }
+ assert enc == expected_dict
- def test_with_time(self):
+ def test_with_time(self, fake_ds):
"""Test data with a time dimension."""
from satpy.writers.cf_writer import update_encoding
# With time dimension
- ds = self.ds.chunk(8).expand_dims({'time': [datetime(2009, 7, 1, 12, 15)]})
+ ds = fake_ds.chunk(8).expand_dims({'time': [datetime(2009, 7, 1, 12, 15)]})
kwargs = {'encoding': {'bar': {'chunksizes': (1, 1, 1)}},
'other': 'kwargs'}
enc, other_kwargs = update_encoding(ds, kwargs)
- self.assertDictEqual(enc, {'y': {'_FillValue': None},
- 'x': {'_FillValue': None},
- 'lon': {'chunksizes': (2, 2)},
- 'foo': {'chunksizes': (1, 2, 2)},
- 'bar': {'chunksizes': (1, 1, 1)},
- 'time': {'_FillValue': None,
- 'calendar': 'proleptic_gregorian',
- 'units': 'days since 2009-07-01 12:15:00'},
- 'time_bnds': {'_FillValue': None,
- 'calendar': 'proleptic_gregorian',
- 'units': 'days since 2009-07-01 12:15:00'}})
-
+ expected_dict = {
+ 'y': {'_FillValue': None},
+ 'x': {'_FillValue': None},
+ 'lon': {'chunksizes': (2, 2)},
+ 'foo': {'chunksizes': (1, 2, 2)},
+ 'bar': {'chunksizes': (1, 1, 1)},
+ 'time': {'_FillValue': None,
+ 'calendar': 'proleptic_gregorian',
+ 'units': 'days since 2009-07-01 12:15:00'},
+ 'time_bnds': {'_FillValue': None,
+ 'calendar': 'proleptic_gregorian',
+ 'units': 'days since 2009-07-01 12:15:00'}
+ }
+ assert enc == expected_dict
# User-defined encoding may not be altered
- self.assertDictEqual(kwargs['encoding'], {'bar': {'chunksizes': (1, 1, 1)}})
+ assert kwargs['encoding'] == {'bar': {'chunksizes': (1, 1, 1)}}
class TestEncodingKwarg:
@@ -1384,5 +1448,5 @@ def _should_use_compression_keyword():
versions = _get_backend_versions()
return (
versions["libnetcdf"] >= Version("4.9.0") and
- versions["xarray"] >= Version("2023.6")
+ versions["xarray"] >= Version("2023.7")
)
diff --git a/satpy/writers/cf/__init__.py b/satpy/writers/cf/__init__.py
new file mode 100644
index 0000000000..f597a9264c
--- /dev/null
+++ b/satpy/writers/cf/__init__.py
@@ -0,0 +1,3 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""Code for generation of CF-compliant datasets."""
diff --git a/satpy/writers/cf/coords_attrs.py b/satpy/writers/cf/coords_attrs.py
new file mode 100644
index 0000000000..c7e559adc2
--- /dev/null
+++ b/satpy/writers/cf/coords_attrs.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""Set CF-compliant attributes to x and y spatial dimensions."""
+
+import logging
+
+from satpy.writers.cf.crs import _is_projected
+
+logger = logging.getLogger(__name__)
+
+
+def add_xy_coords_attrs(dataarray):
+ """Add relevant attributes to x, y coordinates."""
+ # If there are no coords, return dataarray
+ if not dataarray.coords.keys() & {"x", "y", "crs"}:
+ return dataarray
+ # If projected area
+ if _is_projected(dataarray):
+ dataarray = _add_xy_projected_coords_attrs(dataarray)
+ else:
+ dataarray = _add_xy_geographic_coords_attrs(dataarray)
+ if 'crs' in dataarray.coords:
+ dataarray = dataarray.drop_vars('crs')
+ return dataarray
+
+
+def _add_xy_projected_coords_attrs(dataarray, x='x', y='y'):
+ """Add relevant attributes to x, y coordinates of a projected CRS."""
+ if x in dataarray.coords:
+ dataarray[x].attrs['standard_name'] = 'projection_x_coordinate'
+ dataarray[x].attrs['units'] = 'm'
+ if y in dataarray.coords:
+ dataarray[y].attrs['standard_name'] = 'projection_y_coordinate'
+ dataarray[y].attrs['units'] = 'm'
+ return dataarray
+
+
+def _add_xy_geographic_coords_attrs(dataarray, x='x', y='y'):
+ """Add relevant attributes to x, y coordinates of a geographic CRS."""
+ if x in dataarray.coords:
+ dataarray[x].attrs['standard_name'] = 'longitude'
+ dataarray[x].attrs['units'] = 'degrees_east'
+ if y in dataarray.coords:
+ dataarray[y].attrs['standard_name'] = 'latitude'
+ dataarray[y].attrs['units'] = 'degrees_north'
+ return dataarray
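
A minimal sketch of the new helper on an unprojected array, assuming x/y coordinates that already carry degree units (the array values are hypothetical)::

    >>> import xarray as xr
    >>> from satpy.writers.cf.coords_attrs import add_xy_coords_attrs
    >>> x = xr.DataArray([0.0, 1.0], dims='x', attrs={'units': 'degrees_east'})
    >>> y = xr.DataArray([0.0, 1.0], dims='y', attrs={'units': 'degrees_north'})
    >>> da = xr.DataArray([[1, 2], [3, 4]], dims=('y', 'x'), coords={'x': x, 'y': y})
    >>> add_xy_coords_attrs(da)['x'].attrs['standard_name']
    'longitude'
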
diff --git a/satpy/writers/cf/crs.py b/satpy/writers/cf/crs.py
new file mode 100644
index 0000000000..e6952a484f
--- /dev/null
+++ b/satpy/writers/cf/crs.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""CRS utility."""
+
+import logging
+from contextlib import suppress
+
+from pyresample.geometry import AreaDefinition, SwathDefinition
+
+logger = logging.getLogger(__name__)
+
+
+def _is_projected(dataarray):
+ """Guess whether data are projected or not."""
+ crs = _try_to_get_crs(dataarray)
+ if crs:
+ return crs.is_projected
+ units = _try_get_units_from_coords(dataarray)
+ if units:
+ if units.endswith("m"):
+ return True
+ if units.startswith("degrees"):
+ return False
+ logger.warning("Failed to tell if data are projected. Assuming yes.")
+ return True
+
+
+def _try_to_get_crs(dataarray):
+ """Try to get a CRS from attributes."""
+ if "area" in dataarray.attrs:
+ if isinstance(dataarray.attrs["area"], AreaDefinition):
+ return dataarray.attrs["area"].crs
+ if not isinstance(dataarray.attrs["area"], SwathDefinition):
+ logger.warning(
+ f"Could not tell CRS from area of type {type(dataarray.attrs['area']).__name__:s}. "
+ "Assuming projected CRS.")
+ if "crs" in dataarray.coords:
+ return dataarray.coords["crs"].item()
+
+
+def _try_get_units_from_coords(dataarray):
+ """Try to retrieve coordinate x/y units."""
+ for c in ["x", "y"]:
+ with suppress(KeyError):
+ # If the data has only 1 dimension, it has only one of x or y coords
+ if "units" in dataarray.coords[c].attrs:
+ return dataarray.coords[c].attrs["units"]
diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py
index 0cfb808425..b9a24b9292 100644
--- a/satpy/writers/cf_writer.py
+++ b/satpy/writers/cf_writer.py
@@ -44,7 +44,7 @@
coordinate is identical for all datasets, the prefix can be removed by setting ``pretty=True``.
* Some dataset names start with a digit, like AVHRR channels 1, 2, 3a, 3b, 4 and 5. This doesn't comply with CF
https://cfconventions.org/Data/cf-conventions/cf-conventions-1.7/build/ch02s03.html. These channels are prefixed
- with `CHANNEL_` by default. This can be controlled with the variable `numeric_name_prefix` to `save_datasets`.
+ with ``"CHANNEL_"`` by default. This can be controlled with the variable `numeric_name_prefix` to `save_datasets`.
Setting it to `None` or `''` will skip the prefixing.
Grouping
@@ -160,7 +160,6 @@
import logging
import warnings
from collections import OrderedDict, defaultdict
-from contextlib import suppress
from datetime import datetime
import numpy as np
@@ -171,6 +170,7 @@
from xarray.coding.times import CFDatetimeCoder
from satpy.writers import Writer
+from satpy.writers.cf.coords_attrs import add_xy_coords_attrs
from satpy.writers.utils import flatten_dict
logger = logging.getLogger(__name__)
@@ -203,6 +203,7 @@
np.string_]
# Unsigned and int64 isn't CF 1.7 compatible
+# Note: Unsigned and int64 are CF 1.9 compatible
CF_DTYPES = [np.dtype('int8'),
np.dtype('int16'),
np.dtype('int32'),
@@ -213,32 +214,26 @@
CF_VERSION = 'CF-1.7'
-def create_grid_mapping(area):
- """Create the grid mapping instance for `area`."""
- import pyproj
- if Version(pyproj.__version__) < Version('2.4.1'):
- # technically 2.2, but important bug fixes in 2.4.1
- raise ImportError("'cf' writer requires pyproj 2.4.1 or greater")
- # let pyproj do the heavily lifting
- # pyproj 2.0+ required
- grid_mapping = area.crs.to_cf()
- return area.area_id, grid_mapping
-
-
-def get_extra_ds(dataset, keys=None):
- """Get the extra datasets associated to *dataset*."""
+def get_extra_ds(dataarray, keys=None):
+ """Get the ancillary_variables DataArrays associated to a dataset."""
ds_collection = {}
- for ds in dataset.attrs.get('ancillary_variables', []):
- if keys and ds.name not in keys:
- keys.append(ds.name)
- ds_collection.update(get_extra_ds(ds, keys))
- ds_collection[dataset.attrs['name']] = dataset
-
+ # Retrieve ancillary variable dataarrays
+ for ancillary_dataarray in dataarray.attrs.get('ancillary_variables', []):
+ ancillary_variable = ancillary_dataarray.name
+ if keys and ancillary_variable not in keys:
+ keys.append(ancillary_variable)
+ ds_collection.update(get_extra_ds(ancillary_dataarray, keys=keys))
+ # Add input dataarray
+ ds_collection[dataarray.attrs['name']] = dataarray
return ds_collection
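
A sketch of the ancillary-variable collection, with two hypothetical DataArrays::

    >>> import xarray as xr
    >>> from satpy.writers.cf_writer import get_extra_ds
    >>> anc = xr.DataArray([0, 1], dims='x', attrs={'name': 'quality_flag'})
    >>> var1 = xr.DataArray([2, 3], dims='x',
    ...                     attrs={'name': 'var1', 'ancillary_variables': [anc]})
    >>> sorted(get_extra_ds(var1))
    ['quality_flag', 'var1']
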
-def area2lonlat(dataarray):
- """Convert an area to longitudes and latitudes."""
+# ###--------------------------------------------------------------------------.
+# ### CF-Area
+
+
+def add_lonlat_coords(dataarray):
+ """Add 'longitude' and 'latitude' coordinates to DataArray."""
dataarray = dataarray.copy()
area = dataarray.attrs['area']
ignore_dims = {dim: 0 for dim in dataarray.dims if dim not in ['x', 'y']}
@@ -257,36 +252,102 @@ def area2lonlat(dataarray):
return dataarray
-def area2gridmapping(dataarray):
+def _create_grid_mapping(area):
+ """Create the grid mapping instance for `area`."""
+ import pyproj
+
+ if Version(pyproj.__version__) < Version('2.4.1'):
+ # technically 2.2, but important bug fixes in 2.4.1
+ raise ImportError("'cf' writer requires pyproj 2.4.1 or greater")
+ # let pyproj do the heavy lifting (pyproj 2.0+ required)
+ grid_mapping = area.crs.to_cf()
+ return area.area_id, grid_mapping
+
+
+def _add_grid_mapping(dataarray):
"""Convert an area to at CF grid mapping."""
dataarray = dataarray.copy()
area = dataarray.attrs['area']
- gmapping_var_name, attrs = create_grid_mapping(area)
+ gmapping_var_name, attrs = _create_grid_mapping(area)
dataarray.attrs['grid_mapping'] = gmapping_var_name
return dataarray, xr.DataArray(0, attrs=attrs, name=gmapping_var_name)
-def area2cf(dataarray, strict=False, got_lonlats=False):
+def area2cf(dataarray, include_lonlats=False, got_lonlats=False):
"""Convert an area to at CF grid mapping or lon and lats."""
res = []
- if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or strict):
- dataarray = area2lonlat(dataarray)
+ if not got_lonlats and (isinstance(dataarray.attrs['area'], SwathDefinition) or include_lonlats):
+ dataarray = add_lonlat_coords(dataarray)
if isinstance(dataarray.attrs['area'], AreaDefinition):
- dataarray, gmapping = area2gridmapping(dataarray)
+ dataarray, gmapping = _add_grid_mapping(dataarray)
res.append(gmapping)
res.append(dataarray)
return res
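
For an AreaDefinition, ``area2cf`` returns the grid-mapping variable followed by the annotated DataArray; a sketch with a hypothetical 2x2 geostationary area::

    >>> import pyresample.geometry
    >>> import xarray as xr
    >>> from satpy.writers.cf_writer import area2cf
    >>> geos = pyresample.geometry.AreaDefinition(
    ...     'geos', 'geos', 'geos',
    ...     {'proj': 'geos', 'h': 35785831., 'a': 6378169., 'b': 6356583.8},
    ...     2, 2, [-1, -1, 1, 1])
    >>> da = xr.DataArray([[1., 2.], [3., 4.]], dims=('y', 'x'), attrs={'area': geos})
    >>> gmapping, da_cf = area2cf(da, include_lonlats=False)
    >>> da_cf.attrs['grid_mapping']
    'geos'
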
-def make_time_bounds(start_times, end_times):
- """Create time bounds for the current *dataarray*."""
- start_time = min(start_time for start_time in start_times
- if start_time is not None)
- end_time = min(end_time for end_time in end_times
- if end_time is not None)
- data = xr.DataArray([[np.datetime64(start_time), np.datetime64(end_time)]],
- dims=['time', 'bnds_1d'])
- return data
+def is_lon_or_lat_dataarray(dataarray):
+ """Check if the DataArray represents the latitude or longitude coordinate."""
+ if 'standard_name' in dataarray.attrs and dataarray.attrs['standard_name'] in ['longitude', 'latitude']:
+ return True
+ return False
+
+
+def has_projection_coords(ds_collection):
+ """Check if DataArray collection has a "longitude" or "latitude" DataArray."""
+ for dataarray in ds_collection.values():
+ if is_lon_or_lat_dataarray(dataarray):
+ return True
+ return False
+
+
+def make_alt_coords_unique(datas, pretty=False):
+ """Make non-dimensional coordinates unique among all datasets.
+
+ Non-dimensional (or alternative) coordinates, such as scanline timestamps,
+ may occur in multiple datasets with the same name and dimension
+ but different values.
+
+ In order to avoid conflicts, prepend the dataset name to the coordinate name.
+ If a non-dimensional coordinate is unique among all datasets and ``pretty=True``,
+ its name will not be modified.
+
+ Since all datasets must have the same projection coordinates,
+ this is not applied to latitude and longitude.
+
+ Args:
+ datas (dict):
+ Dictionary of (dataset name, dataset)
+ pretty (bool):
+ Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent.
+
+ Returns:
+ Dictionary holding the updated datasets
+
+ """
+ # Determine which non-dimensional coordinates are unique
+ tokens = defaultdict(set)
+ for dataset in datas.values():
+ for coord_name in dataset.coords:
+ if not is_lon_or_lat_dataarray(dataset[coord_name]) and coord_name not in dataset.dims:
+ tokens[coord_name].add(tokenize(dataset[coord_name].data))
+ coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()])
+
+ # Prepend dataset name, if not unique or no pretty-format desired
+ new_datas = datas.copy()
+ for coord_name, unique in coords_unique.items():
+ if not pretty or not unique:
+ if pretty:
+ warnings.warn(
+ 'Cannot pretty-format "{}" coordinates because they are '
+ 'not identical among the given datasets'.format(coord_name),
+ stacklevel=2
+ )
+ for ds_name, dataset in datas.items():
+ if coord_name in dataset.coords:
+ rename = {coord_name: '{}_{}'.format(ds_name, coord_name)}
+ new_datas[ds_name] = new_datas[ds_name].rename(rename)
+
+ return new_datas
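
A sketch of the renaming behaviour for two hypothetical datasets whose 'acq_time' coordinates differ::

    >>> import xarray as xr
    >>> from satpy.writers.cf_writer import make_alt_coords_unique
    >>> da1 = xr.DataArray([1, 2], dims='y', coords={'y': [0, 1], 'acq_time': ('y', [1, 2])})
    >>> da2 = xr.DataArray([3, 4], dims='y', coords={'y': [0, 1], 'acq_time': ('y', [3, 4])})
    >>> res = make_alt_coords_unique({'var1': da1, 'var2': da2})
    >>> sorted(res['var1'].coords)
    ['var1_acq_time', 'y']
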
def assert_xy_unique(datas):
@@ -335,65 +396,52 @@ def link_coords(datas):
data.attrs.pop('coordinates', None)
-def dataset_is_projection_coords(dataset):
- """Check if dataset is a projection coords."""
- if 'standard_name' in dataset.attrs and dataset.attrs['standard_name'] in ['longitude', 'latitude']:
- return True
- return False
+# ###--------------------------------------------------------------------------.
+# ### CF-Time
+def add_time_bounds_dimension(ds, time="time"):
+ """Add time bound dimension to xr.Dataset."""
+ start_times = []
+ end_times = []
+ for _var_name, data_array in ds.items():
+ start_times.append(data_array.attrs.get("start_time", None))
+ end_times.append(data_array.attrs.get("end_time", None))
+ start_time = min(start_time for start_time in start_times
+ if start_time is not None)
+ end_time = min(end_time for end_time in end_times
+ if end_time is not None)
+ ds['time_bnds'] = xr.DataArray([[np.datetime64(start_time),
+ np.datetime64(end_time)]],
+ dims=['time', 'bnds_1d'])
+ ds[time].attrs['bounds'] = "time_bnds"
+ ds[time].attrs['standard_name'] = "time"
+ return ds
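
A sketch with a single hypothetical variable carrying start_time/end_time attributes::

    >>> from datetime import datetime
    >>> import xarray as xr
    >>> from satpy.writers.cf_writer import add_time_bounds_dimension
    >>> da = xr.DataArray([[[1., 2.]]], dims=('time', 'y', 'x'),
    ...                   coords={'time': [datetime(2019, 4, 1, 12, 0)]},
    ...                   attrs={'start_time': datetime(2019, 4, 1, 12, 0),
    ...                          'end_time': datetime(2019, 4, 1, 12, 15)})
    >>> ds = add_time_bounds_dimension(xr.Dataset({'var1': da}))
    >>> ds['time'].attrs['bounds']
    'time_bnds'
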
-def has_projection_coords(ds_collection):
- """Check if collection has a projection coords among data arrays."""
- for dataset in ds_collection.values():
- if dataset_is_projection_coords(dataset):
- return True
- return False
+def _process_time_coord(dataarray, epoch):
+ """Process the 'time' coordinate, if existing.
-def make_alt_coords_unique(datas, pretty=False):
- """Make non-dimensional coordinates unique among all datasets.
+ It expands the DataArray with a time dimension if one does not yet exist.
- Non-dimensional (or alternative) coordinates, such as scanline timestamps, may occur in multiple datasets with
- the same name and dimension but different values. In order to avoid conflicts, prepend the dataset name to the
- coordinate name. If a non-dimensional coordinate is unique among all datasets and ``pretty=True``, its name will not
- be modified.
+ The function assumes
- Since all datasets must have the same projection coordinates, this is not applied to latitude and longitude.
+ - that the x and y dimensions each have a size greater than 1
+ - that the time coordinate has size 1
- Args:
- datas (dict):
- Dictionary of (dataset name, dataset)
- pretty (bool):
- Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent.
+ """
+ if 'time' in dataarray.coords:
+ dataarray['time'].encoding['units'] = epoch
+ dataarray['time'].attrs['standard_name'] = 'time'
+ dataarray['time'].attrs.pop('bounds', None)
- Returns:
- Dictionary holding the updated datasets
+ if 'time' not in dataarray.dims and dataarray["time"].size not in dataarray.shape:
+ dataarray = dataarray.expand_dims('time')
- """
- # Determine which non-dimensional coordinates are unique
- tokens = defaultdict(set)
- for dataset in datas.values():
- for coord_name in dataset.coords:
- if not dataset_is_projection_coords(dataset[coord_name]) and coord_name not in dataset.dims:
- tokens[coord_name].add(tokenize(dataset[coord_name].data))
- coords_unique = dict([(coord_name, len(tokens) == 1) for coord_name, tokens in tokens.items()])
+ return dataarray
- # Prepend dataset name, if not unique or no pretty-format desired
- new_datas = datas.copy()
- for coord_name, unique in coords_unique.items():
- if not pretty or not unique:
- if pretty:
- warnings.warn(
- 'Cannot pretty-format "{}" coordinates because they are '
- 'not identical among the given datasets'.format(coord_name),
- stacklevel=2
- )
- for ds_name, dataset in datas.items():
- if coord_name in dataset.coords:
- rename = {coord_name: '{}_{}'.format(ds_name, coord_name)}
- new_datas[ds_name] = new_datas[ds_name].rename(rename)
- return new_datas
+# --------------------------------------------------------------------------.
+# ### Attributes
class AttributeEncoder(json.JSONEncoder):
@@ -502,6 +550,86 @@ def encode_attrs_nc(attrs):
return OrderedDict(encoded_attrs)
+def _add_ancillary_variables_attrs(dataarray):
+ """Replace ancillary_variables DataArray with a list of their name."""
+ list_ancillary_variable_names = [da_ancillary.attrs['name']
+ for da_ancillary in dataarray.attrs.get('ancillary_variables', [])]
+ if list_ancillary_variable_names:
+ dataarray.attrs['ancillary_variables'] = ' '.join(list_ancillary_variable_names)
+ else:
+ dataarray.attrs.pop("ancillary_variables", None)
+ return dataarray
+
+
+def _drop_exclude_attrs(dataarray, exclude_attrs):
+ """Remove user-specified list of attributes."""
+ if exclude_attrs is None:
+ exclude_attrs = []
+ for key in exclude_attrs:
+ dataarray.attrs.pop(key, None)
+ return dataarray
+
+
+def _remove_satpy_attrs(new_data):
+ """Remove _satpy attribute."""
+ satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')]
+ for satpy_attr in satpy_attrs:
+ new_data.attrs.pop(satpy_attr)
+ new_data.attrs.pop('_last_resampler', None)
+ return new_data
+
+
+def _format_prerequisites_attrs(dataarray):
+ """Reformat prerequisites attribute value to string."""
+ if 'prerequisites' in dataarray.attrs:
+ dataarray.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in dataarray.attrs['prerequisites']]
+ return dataarray
+
+
+def _remove_none_attrs(dataarray):
+ """Remove attribute keys with None value."""
+ for key, val in dataarray.attrs.copy().items():
+ if val is None:
+ dataarray.attrs.pop(key)
+ return dataarray
+
+
+def preprocess_datarray_attrs(dataarray, flatten_attrs, exclude_attrs):
+ """Preprocess DataArray attributes to be written into CF-compliant netCDF/Zarr."""
+ dataarray = _remove_satpy_attrs(dataarray)
+ dataarray = _add_ancillary_variables_attrs(dataarray)
+ dataarray = _drop_exclude_attrs(dataarray, exclude_attrs)
+ dataarray = _format_prerequisites_attrs(dataarray)
+ dataarray = _remove_none_attrs(dataarray)
+ _ = dataarray.attrs.pop("area", None)
+
+ if 'long_name' not in dataarray.attrs and 'standard_name' not in dataarray.attrs:
+ dataarray.attrs['long_name'] = dataarray.name
+
+ if flatten_attrs:
+ dataarray.attrs = flatten_dict(dataarray.attrs)
+
+ dataarray.attrs = encode_attrs_nc(dataarray.attrs)
+
+ return dataarray
+
+
+def preprocess_header_attrs(header_attrs, flatten_attrs=False):
+ """Prepare file header attributes."""
+ if header_attrs is not None:
+ if flatten_attrs:
+ header_attrs = flatten_dict(header_attrs)
+ header_attrs = encode_attrs_nc(header_attrs) # OrderedDict
+ else:
+ header_attrs = {}
+ header_attrs = _add_history(header_attrs)
+ return header_attrs
+
+
+# ###--------------------------------------------------------------------------.
+# ### netCDF encodings
+
+
def _set_default_chunks(encoding, dataset):
"""Update encoding to preserve current dask chunks.
@@ -515,6 +643,7 @@ def _set_default_chunks(encoding, dataset):
) # Chunksize may not exceed shape
encoding.setdefault(var_name, {})
encoding[var_name].setdefault('chunksizes', chunks)
+ return encoding
def _set_default_fill_value(encoding, dataset):
@@ -529,13 +658,15 @@ def _set_default_fill_value(encoding, dataset):
for coord_var in coord_vars:
encoding.setdefault(coord_var, {})
encoding[coord_var].update({'_FillValue': None})
+ return encoding
def _set_default_time_encoding(encoding, dataset):
"""Set default time encoding.
- Make sure time coordinates and bounds have the same units. Default is xarray's CF datetime
- encoding, which can be overridden by user-defined encoding.
+ Make sure time coordinates and bounds have the same units.
+ Default is xarray's CF datetime encoding, which can be overridden
+ by user-defined encoding.
"""
if 'time' in dataset:
try:
@@ -551,21 +682,25 @@ def _set_default_time_encoding(encoding, dataset):
'_FillValue': None}
encoding['time'] = time_enc
encoding['time_bnds'] = bounds_enc # FUTURE: Not required anymore with xarray-0.14+
+ return encoding
-def _set_encoding_dataset_names(encoding, dataset, numeric_name_prefix):
- """Set Netcdf variable names encoding according to numeric_name_prefix.
+def _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix):
+ """Ensure variable names of the encoding dictionary account for numeric_name_prefix.
- A lot of channel names in satpy starts with a digit. When writing data with the satpy_cf_nc
- these channels are prepended with numeric_name_prefix.
- This ensures this is also done with any matching variables in encoding.
+ A lot of channel names in satpy start with a digit.
+ When preparing CF-compliant datasets, these channels are prefixed with numeric_name_prefix.
+
+ If variable names in the encoding dictionary are numeric digits, their names are prefixed
+ with numeric_name_prefix.
"""
- for _var_name, _variable in dataset.variables.items():
- if not numeric_name_prefix or not _var_name.startswith(numeric_name_prefix):
+ for var_name in list(dataset.variables):
+ if not numeric_name_prefix or not var_name.startswith(numeric_name_prefix):
continue
- _orig_var_name = _var_name.replace(numeric_name_prefix, '')
- if _orig_var_name in encoding:
- encoding[_var_name] = encoding.pop(_orig_var_name)
+ orig_var_name = var_name.replace(numeric_name_prefix, '')
+ if orig_var_name in encoding:
+ encoding[var_name] = encoding.pop(orig_var_name)
+ return encoding
def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'):
@@ -576,54 +711,341 @@ def update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix='CHANNEL_'):
"""
other_to_netcdf_kwargs = to_netcdf_kwargs.copy()
encoding = other_to_netcdf_kwargs.pop('encoding', {}).copy()
+ encoding = _update_encoding_dataset_names(encoding, dataset, numeric_name_prefix)
+ encoding = _set_default_chunks(encoding, dataset)
+ encoding = _set_default_fill_value(encoding, dataset)
+ encoding = _set_default_time_encoding(encoding, dataset)
+ return encoding, other_to_netcdf_kwargs
- _set_encoding_dataset_names(encoding, dataset, numeric_name_prefix)
- _set_default_chunks(encoding, dataset)
- _set_default_fill_value(encoding, dataset)
- _set_default_time_encoding(encoding, dataset)
- return encoding, other_to_netcdf_kwargs
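
A sketch of the now-chainable helpers, mirroring the updated tests (the single-variable dataset is hypothetical)::

    >>> import xarray as xr
    >>> from satpy.writers.cf_writer import update_encoding
    >>> ds = xr.Dataset({'foo': (('y', 'x'), [[1, 2], [3, 4]])},
    ...                 coords={'y': [1, 2], 'x': [3, 4]}).chunk(2)
    >>> kwargs = {'encoding': {'foo': {'dtype': 'int8'}}, 'other': 'kwargs'}
    >>> enc, other = update_encoding(ds, kwargs)
    >>> enc['y']
    {'_FillValue': None}
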
+# ###--------------------------------------------------------------------------.
+# ### CF-conversion
def _handle_dataarray_name(original_name, numeric_name_prefix):
- name = original_name
- if name[0].isdigit():
+ if original_name[0].isdigit():
if numeric_name_prefix:
- name = numeric_name_prefix + original_name
+ new_name = numeric_name_prefix + original_name
else:
warnings.warn(
- 'Invalid NetCDF dataset name: {} starts with a digit.'.format(name),
+ f'Invalid NetCDF dataset name: {original_name} starts with a digit.',
stacklevel=5
)
- return original_name, name
+ new_name = original_name # occurs when numeric_name_prefix = '', None or False
+ else:
+ new_name = original_name
+ return original_name, new_name
+
+
+def _preprocess_dataarray_name(dataarray, numeric_name_prefix, include_orig_name):
+ """Change the DataArray name by prepending numeric_name_prefix if the name is a digit."""
+ original_name = None
+ dataarray = dataarray.copy()
+ if 'name' in dataarray.attrs:
+ original_name = dataarray.attrs.pop('name')
+ original_name, new_name = _handle_dataarray_name(original_name, numeric_name_prefix)
+ dataarray = dataarray.rename(new_name)
+
+ if include_orig_name and numeric_name_prefix and original_name and original_name != new_name:
+ dataarray.attrs['original_name'] = original_name
+
+ return dataarray
-def _set_history(root):
+def _add_history(attrs):
+ """Add 'history' attribute to dictionary."""
_history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow())
- if 'history' in root.attrs:
- if isinstance(root.attrs['history'], list):
- root.attrs['history'] = ''.join(root.attrs['history'])
- root.attrs['history'] += '\n' + _history_create
+ if 'history' in attrs:
+ if isinstance(attrs['history'], list):
+ attrs['history'] = ''.join(attrs['history'])
+ attrs['history'] += '\n' + _history_create
else:
- root.attrs['history'] = _history_create
+ attrs['history'] = _history_create
+ return attrs
+
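A sketch of the history handling, with a hypothetical attribute dict::

    >>> from satpy.writers.cf_writer import _add_history
    >>> attrs = _add_history({'institution': 'EUMETSAT'})
    >>> attrs['history'].startswith('Created by pytroll/satpy on')
    True
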
+def _get_groups(groups, list_dataarrays):
+ """Return a dictionary with the list of xr.DataArray associated to each group.
-def _get_groups(groups, datasets, root):
+ If groups is None, return all DataArrays attached to a single None key.
+ Otherwise, collect the DataArrays associated with each group.
+ """
if groups is None:
- # Groups are not CF-1.7 compliant
- if 'Conventions' not in root.attrs:
- root.attrs['Conventions'] = CF_VERSION
- # Write all datasets to the file root without creating a group
- groups_ = {None: datasets}
+ grouped_dataarrays = {None: list_dataarrays}
else:
- # User specified a group assignment using dataset names. Collect the corresponding datasets.
- groups_ = defaultdict(list)
- for dataset in datasets:
+ grouped_dataarrays = defaultdict(list)
+ for dataarray in list_dataarrays:
for group_name, group_members in groups.items():
- if dataset.attrs['name'] in group_members:
- groups_[group_name].append(dataset)
+ if dataarray.attrs['name'] in group_members:
+ grouped_dataarrays[group_name].append(dataarray)
break
- return groups_
+ return grouped_dataarrays
+
+
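A sketch of the group assignment with two hypothetical channels::

    >>> import xarray as xr
    >>> from satpy.writers.cf_writer import _get_groups
    >>> vis = xr.DataArray([1.], dims='x', attrs={'name': 'vis'})
    >>> ir = xr.DataArray([2.], dims='x', attrs={'name': 'ir'})
    >>> groups = _get_groups({'visir': ['vis', 'ir']}, [vis, ir])
    >>> [da.attrs['name'] for da in groups['visir']]
    ['vis', 'ir']
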
+def make_cf_dataarray(dataarray,
+ epoch=EPOCH,
+ flatten_attrs=False,
+ exclude_attrs=None,
+ include_orig_name=True,
+ numeric_name_prefix='CHANNEL_'):
+ """Make the xr.DataArray CF-compliant.
+
+ Parameters
+ ----------
+ dataarray : xr.DataArray
+ The data array to be made CF-compliant.
+ epoch : str, optional
+ Reference time for encoding of time coordinates.
+ flatten_attrs : bool, optional
+ If True, flatten dict-type attributes.
+ The default is False.
+ exclude_attrs : list, optional
+ List of dataset attributes to be excluded.
+ The default is None.
+ include_orig_name : bool, optional
+ Include the original dataset name in the netcdf variable attributes.
+ The default is True.
+ numeric_name_prefix : str, optional
+ Prepend dataset name with this if starting with a digit.
+ The default is ``"CHANNEL_"``.
+
+ Returns
+ -------
+ new_data : xr.DataArray
+ CF-compliant xr.DataArray.
+
+ """
+ dataarray = _preprocess_dataarray_name(dataarray=dataarray,
+ numeric_name_prefix=numeric_name_prefix,
+ include_orig_name=include_orig_name)
+ dataarray = preprocess_datarray_attrs(dataarray=dataarray,
+ flatten_attrs=flatten_attrs,
+ exclude_attrs=exclude_attrs)
+ dataarray = add_xy_coords_attrs(dataarray)
+ dataarray = _process_time_coord(dataarray, epoch=epoch)
+ return dataarray
+
+
+def _collect_cf_dataset(list_dataarrays,
+ epoch=EPOCH,
+ flatten_attrs=False,
+ exclude_attrs=None,
+ include_lonlats=True,
+ pretty=False,
+ include_orig_name=True,
+ numeric_name_prefix='CHANNEL_'):
+ """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Dataset.
+
+ Parameters
+ ----------
+ list_dataarrays : list
+ List of DataArrays to make CF compliant and merge into a xr.Dataset.
+ epoch : str
+ Reference time for encoding the time coordinates (if available).
+ Example format: "seconds since 1970-01-01 00:00:00".
+ If None, the default reference time is retrieved using `from satpy.writers.cf_writer import EPOCH`
+ flatten_attrs : bool, optional
+ If True, flatten dict-type attributes.
+ exclude_attrs : list, optional
+ List of xr.DataArray attribute names to be excluded.
+ include_lonlats : bool, optional
+ If True, it includes 'latitude' and 'longitude' coordinates also for a satpy Scene defined on an AreaDefinition.
+ If the 'area' attribute is a SwathDefinition, it always includes latitude and longitude coordinates.
+ pretty : bool, optional
+ Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent.
+ include_orig_name : bool, optional
+ Include the original dataset name as a variable attribute in the xr.Dataset.
+ numeric_name_prefix : str, optional
+ Prefix to add to each variable whose name starts with a digit.
+ Use '' or None to leave this out.
+
+ Returns
+ -------
+ ds : xr.Dataset
+ A partially CF-compliant xr.Dataset
+ """
+ # Create a dictionary of the input dataarrays
+ # --> Since keys=None, ancillary variables are always retrieved !!!
+ ds_collection = {}
+ for dataarray in list_dataarrays:
+ ds_collection.update(get_extra_ds(dataarray))
+
+ # Check if one DataArray in the collection has 'longitude' or 'latitude'
+ got_lonlats = has_projection_coords(ds_collection)
+
+ # Sort dictionary by key name
+ ds_collection = dict(sorted(ds_collection.items()))
+
+ dict_dataarrays = {}
+ for dataarray in ds_collection.values():
+ dataarray_type = dataarray.dtype
+ if dataarray_type not in CF_DTYPES:
+ warnings.warn(
+ f'dtype {dataarray_type} not compatible with {CF_VERSION}.',
+ stacklevel=3
+ )
+ # Deep copy the dataarray since attributes and coordinates are added/modified
+ dataarray = dataarray.copy(deep=True)
+
+ # Add CF-compliant area information from the pyresample area
+ # - If include_lonlats=True, add latitude and longitude coordinates
+ # - Add grid_mapping attribute to the DataArray
+ # - Return the CRS DataArray as first list element
+ # - Return the CF-compliant input DataArray as second list element
+ try:
+ list_new_dataarrays = area2cf(dataarray,
+ include_lonlats=include_lonlats,
+ got_lonlats=got_lonlats)
+ except KeyError:
+ list_new_dataarrays = [dataarray]
+
+ # Ensure each DataArray is CF-compliant
+ # --> NOTE: Here the CRS DataArray is repeatedly overwritten
+ # --> NOTE: If the input list_dataarrays have different pyresample areas with the same name,
+ # area information can be lost here !!!
+ for new_dataarray in list_new_dataarrays:
+ new_dataarray = make_cf_dataarray(new_dataarray,
+ epoch=epoch,
+ flatten_attrs=flatten_attrs,
+ exclude_attrs=exclude_attrs,
+ include_orig_name=include_orig_name,
+ numeric_name_prefix=numeric_name_prefix)
+ dict_dataarrays[new_dataarray.name] = new_dataarray
+
+ # Check that all DataArrays have the same size
+ assert_xy_unique(dict_dataarrays)
+
+ # Deal with the 'coordinates' attribute indicating lat/lon coords
+ # NOTE: the 'coordinates' attribute is currently dropped by default !!!
+ link_coords(dict_dataarrays)
+
+ # Ensure non-dimensional coordinates are unique across DataArrays
+ # --> If not unique, prepend the DataArray name to the coordinate
+ # --> If unique, does not prepend the DataArray name only if pretty=True
+ # --> 'longitude' and 'latitude' coordinates are not prepended
+ dict_dataarrays = make_alt_coords_unique(dict_dataarrays, pretty=pretty)
+
+ # Create an xr.Dataset
+ ds = xr.Dataset(dict_dataarrays)
+ return ds
+
+
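A sketch of the collection path for one hypothetical DataArray without an 'area' attribute (exercising the KeyError branch)::

    >>> import xarray as xr
    >>> from satpy.writers.cf_writer import _collect_cf_dataset
    >>> da = xr.DataArray([[1., 2.], [3., 4.]], dims=('y', 'x'),
    ...                   coords={'y': [1, 2], 'x': [1, 2]}, attrs={'name': 'var1'})
    >>> ds = _collect_cf_dataset([da], include_lonlats=False)
    >>> list(ds.data_vars)
    ['var1']
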
+def collect_cf_datasets(list_dataarrays,
+ header_attrs=None,
+ exclude_attrs=None,
+ flatten_attrs=False,
+ pretty=True,
+ include_lonlats=True,
+ epoch=EPOCH,
+ include_orig_name=True,
+ numeric_name_prefix='CHANNEL_',
+ groups=None):
+ """Process a list of xr.DataArray and return a dictionary with CF-compliant xr.Datasets.
+
+ If the xr.DataArrays do not share the same dimensions, it creates a collection
+ of xr.Datasets sharing the same dimensions.
+
+ Parameters
+ ----------
+ list_dataarrays (list):
+ List of DataArrays to make CF compliant and merge into groups of xr.Datasets.
+ header_attrs (dict):
+ Global attributes of the output xr.Dataset.
+ epoch (str):
+ Reference time for encoding the time coordinates (if available).
+ Example format: "seconds since 1970-01-01 00:00:00".
+ If None, the default reference time is retrieved using `from satpy.writers.cf_writer import EPOCH`
+ flatten_attrs (bool):
+ If True, flatten dict-type attributes.
+ exclude_attrs (list):
+ List of xr.DataArray attribute names to be excluded.
+ include_lonlats (bool):
+ If True, it includes 'latitude' and 'longitude' coordinates also for a satpy Scene defined on an AreaDefinition.
+ If the 'area' attribute is a SwathDefinition, it always includes latitude and longitude coordinates.
+ pretty (bool):
+ Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent.
+ include_orig_name (bool):
+ Include the original dataset name as a variable attribute in the xr.Dataset.
+ numeric_name_prefix (str):
+ Prefix to add to each variable whose name starts with a digit.
+ Use '' or None to leave this out.
+ groups (dict):
+ Group datasets according to the given assignment:
+
+ `{'<group_name>': ['dataset_name1', 'dataset_name2', ...]}`
+
+ It is used to create grouped netCDFs using the CF writer.
+ If None (the default), no groups will be created.
+
+ Returns
+ -------
+ grouped_datasets : dict
+ A dictionary of CF-compliant xr.Dataset: {group_name: xr.Dataset}
+ header_attrs : dict
+ Global attributes to be attached to the xr.Dataset / netCDF4.
+ """
+ if not list_dataarrays:
+ raise RuntimeError("None of the requested datasets have been "
+ "generated or could not be loaded. Requested "
+ "composite inputs may need to have matching "
+ "dimensions (eg. through resampling).")
+
+ header_attrs = preprocess_header_attrs(header_attrs=header_attrs,
+ flatten_attrs=flatten_attrs)
+
+ # Retrieve groups
+ # - If groups is None: {None: list_dataarrays}
+ # - if groups not None: {group_name: [xr.DataArray, xr.DataArray ,..], ...}
+ # Note: if all dataset names are wrong, it behaves like groups=None !
+ grouped_dataarrays = _get_groups(groups, list_dataarrays)
+ is_grouped = len(grouped_dataarrays) >= 2
+
+ # If not grouped, add CF conventions.
+ # - If 'Conventions' key already present, do not overwrite !
+ if "Conventions" not in header_attrs and not is_grouped:
+ header_attrs['Conventions'] = CF_VERSION
+
+ # Create dictionary of group xr.Datasets
+ # --> If no groups (groups=None) --> group_name=None
+ grouped_datasets = {}
+ for group_name, group_dataarrays in grouped_dataarrays.items():
+ ds = _collect_cf_dataset(
+ list_dataarrays=group_dataarrays,
+ epoch=epoch,
+ flatten_attrs=flatten_attrs,
+ exclude_attrs=exclude_attrs,
+ include_lonlats=include_lonlats,
+ pretty=pretty,
+ include_orig_name=include_orig_name,
+ numeric_name_prefix=numeric_name_prefix)
+
+ if not is_grouped:
+ ds.attrs = header_attrs
+
+ if 'time' in ds:
+ ds = add_time_bounds_dimension(ds, time="time")
+
+ grouped_datasets[group_name] = ds
+ return grouped_datasets, header_attrs
+
+
+def _sanitize_writer_kwargs(writer_kwargs):
+ """Remove satpy-specific kwargs."""
+ writer_kwargs = copy.deepcopy(writer_kwargs)
+ satpy_kwargs = ['overlay', 'decorate', 'config_files']
+ for kwarg in satpy_kwargs:
+ writer_kwargs.pop(kwarg, None)
+ return writer_kwargs
+
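+# Sketch of the expected behaviour (hypothetical values):
+#
+#     >>> _sanitize_writer_kwargs({"decorate": True, "encoding": {"IR_108": {}}})
+#     {'encoding': {'IR_108': {}}}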
+
+def _initialize_root_netcdf(filename, engine, header_attrs, to_netcdf_kwargs):
+ """Initialize root empty netCDF."""
+ root = xr.Dataset({}, attrs=header_attrs)
+ init_nc_kwargs = to_netcdf_kwargs.copy()
+ init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point
+ init_nc_kwargs.pop('unlimited_dims', None)
+ written = [root.to_netcdf(filename, engine=engine, mode='w', **init_nc_kwargs)]
+ return written
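+
+# Illustrative sketch (hypothetical arguments): the empty "root" file is
+# created first so that each group's xr.Dataset can later be appended with
+# mode='a':
+#
+#     written = _initialize_root_netcdf("out.nc", "netcdf4",
+#                                       {"title": "demo"}, {})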
class CFWriter(Writer):
@@ -648,198 +1070,28 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None,
numeric_name_prefix (str):
Prepend dataset name with this if starting with a digit
"""
- if exclude_attrs is None:
- exclude_attrs = []
-
- original_name = None
- new_data = dataarray.copy()
- if 'name' in new_data.attrs:
- name = new_data.attrs.pop('name')
- original_name, name = _handle_dataarray_name(name, numeric_name_prefix)
- new_data = new_data.rename(name)
-
- CFWriter._remove_satpy_attributes(new_data)
-
- new_data = CFWriter._encode_time(new_data, epoch)
- new_data = CFWriter._encode_coords(new_data)
-
- # Remove area as well as user-defined attributes
- for key in ['area'] + exclude_attrs:
- new_data.attrs.pop(key, None)
-
- anc = [ds.attrs['name']
- for ds in new_data.attrs.get('ancillary_variables', [])]
- if anc:
- new_data.attrs['ancillary_variables'] = ' '.join(anc)
- # TODO: make this a grid mapping or lon/lats
- # new_data.attrs['area'] = str(new_data.attrs.get('area'))
- CFWriter._cleanup_attrs(new_data)
-
- if 'long_name' not in new_data.attrs and 'standard_name' not in new_data.attrs:
- new_data.attrs['long_name'] = new_data.name
- if 'prerequisites' in new_data.attrs:
- new_data.attrs['prerequisites'] = [np.string_(str(prereq)) for prereq in new_data.attrs['prerequisites']]
-
- if include_orig_name and numeric_name_prefix and original_name and original_name != name:
- new_data.attrs['original_name'] = original_name
-
- # Flatten dict-type attributes, if desired
- if flatten_attrs:
- new_data.attrs = flatten_dict(new_data.attrs)
-
- # Encode attributes to netcdf-compatible datatype
- new_data.attrs = encode_attrs_nc(new_data.attrs)
-
- return new_data
+ warnings.warn('CFWriter.da2cf is deprecated. '
+ 'Use satpy.writers.cf_writer.make_cf_dataarray instead.',
+ DeprecationWarning, stacklevel=3)
+ return make_cf_dataarray(dataarray=dataarray,
+ epoch=epoch,
+ flatten_attrs=flatten_attrs,
+ exclude_attrs=exclude_attrs,
+ include_orig_name=include_orig_name,
+ numeric_name_prefix=numeric_name_prefix)
@staticmethod
- def _cleanup_attrs(new_data):
- for key, val in new_data.attrs.copy().items():
- if val is None:
- new_data.attrs.pop(key)
- if key == 'ancillary_variables' and val == []:
- new_data.attrs.pop(key)
-
- @staticmethod
- def _encode_coords(new_data):
- """Encode coordinates."""
- if not new_data.coords.keys() & {"x", "y", "crs"}:
- # there are no coordinates
- return new_data
- is_projected = CFWriter._is_projected(new_data)
- if is_projected:
- new_data = CFWriter._encode_xy_coords_projected(new_data)
- else:
- new_data = CFWriter._encode_xy_coords_geographic(new_data)
- if 'crs' in new_data.coords:
- new_data = new_data.drop_vars('crs')
- return new_data
-
- @staticmethod
- def _is_projected(new_data):
- """Guess whether data are projected or not."""
- crs = CFWriter._try_to_get_crs(new_data)
- if crs:
- return crs.is_projected
- units = CFWriter._try_get_units_from_coords(new_data)
- if units:
- if units.endswith("m"):
- return True
- if units.startswith("degrees"):
- return False
- logger.warning("Failed to tell if data are projected. Assuming yes.")
- return True
-
- @staticmethod
- def _try_to_get_crs(new_data):
- """Try to get a CRS from attributes."""
- if "area" in new_data.attrs:
- if isinstance(new_data.attrs["area"], AreaDefinition):
- return new_data.attrs["area"].crs
- if not isinstance(new_data.attrs["area"], SwathDefinition):
- logger.warning(
- f"Could not tell CRS from area of type {type(new_data.attrs['area']).__name__:s}. "
- "Assuming projected CRS.")
- if "crs" in new_data.coords:
- return new_data.coords["crs"].item()
-
- @staticmethod
- def _try_get_units_from_coords(new_data):
- for c in "xy":
- with suppress(KeyError):
- # If the data has only 1 dimension, it has only one of x or y coords
- if "units" in new_data.coords[c].attrs:
- return new_data.coords[c].attrs["units"]
-
- @staticmethod
- def _encode_xy_coords_projected(new_data):
- """Encode coordinates, assuming projected CRS."""
- if 'x' in new_data.coords:
- new_data['x'].attrs['standard_name'] = 'projection_x_coordinate'
- new_data['x'].attrs['units'] = 'm'
- if 'y' in new_data.coords:
- new_data['y'].attrs['standard_name'] = 'projection_y_coordinate'
- new_data['y'].attrs['units'] = 'm'
- return new_data
-
- @staticmethod
- def _encode_xy_coords_geographic(new_data):
- """Encode coordinates, assuming geographic CRS."""
- if 'x' in new_data.coords:
- new_data['x'].attrs['standard_name'] = 'longitude'
- new_data['x'].attrs['units'] = 'degrees_east'
- if 'y' in new_data.coords:
- new_data['y'].attrs['standard_name'] = 'latitude'
- new_data['y'].attrs['units'] = 'degrees_north'
- return new_data
-
- @staticmethod
- def _encode_time(new_data, epoch):
- if 'time' in new_data.coords:
- new_data['time'].encoding['units'] = epoch
- new_data['time'].attrs['standard_name'] = 'time'
- new_data['time'].attrs.pop('bounds', None)
- new_data = CFWriter._add_time_dimension(new_data)
- return new_data
-
- @staticmethod
- def _add_time_dimension(new_data):
- if 'time' not in new_data.dims and new_data["time"].size not in new_data.shape:
- new_data = new_data.expand_dims('time')
- return new_data
-
- @staticmethod
- def _remove_satpy_attributes(new_data):
- # Remove _satpy* attributes
- satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')]
- for satpy_attr in satpy_attrs:
- new_data.attrs.pop(satpy_attr)
- new_data.attrs.pop('_last_resampler', None)
+ def update_encoding(dataset, to_netcdf_kwargs):
+ """Update encoding info (deprecated)."""
+ warnings.warn('CFWriter.update_encoding is deprecated. '
+ 'Use satpy.writers.cf_writer.update_encoding instead.',
+ DeprecationWarning, stacklevel=3)
+ return update_encoding(dataset, to_netcdf_kwargs)
def save_dataset(self, dataset, filename=None, fill_value=None, **kwargs):
"""Save the *dataset* to a given *filename*."""
return self.save_datasets([dataset], filename, **kwargs)
- def _collect_datasets(self, datasets, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, include_lonlats=True,
- pretty=False, include_orig_name=True, numeric_name_prefix='CHANNEL_'):
- """Collect and prepare datasets to be written."""
- ds_collection = {}
- for ds in datasets:
- ds_collection.update(get_extra_ds(ds))
- got_lonlats = has_projection_coords(ds_collection)
- datas = {}
- start_times = []
- end_times = []
- # sort by name, but don't use the name
- for _, ds in sorted(ds_collection.items()):
- if ds.dtype not in CF_DTYPES:
- warnings.warn(
- 'Dtype {} not compatible with {}.'.format(str(ds.dtype), CF_VERSION),
- stacklevel=3
- )
- # we may be adding attributes, coordinates, or modifying the
- # structure of attributes
- ds = ds.copy(deep=True)
- try:
- new_datasets = area2cf(ds, strict=include_lonlats, got_lonlats=got_lonlats)
- except KeyError:
- new_datasets = [ds]
- for new_ds in new_datasets:
- start_times.append(new_ds.attrs.get("start_time", None))
- end_times.append(new_ds.attrs.get("end_time", None))
- new_var = self.da2cf(new_ds, epoch=epoch, flatten_attrs=flatten_attrs,
- exclude_attrs=exclude_attrs,
- include_orig_name=include_orig_name,
- numeric_name_prefix=numeric_name_prefix)
- datas[new_var.name] = new_var
-
- # Check and prepare coordinates
- assert_xy_unique(datas)
- link_coords(datas)
- datas = make_alt_coords_unique(datas, pretty=pretty)
-
- return datas, start_times, end_times
-
def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None, engine=None, epoch=EPOCH,
flatten_attrs=False, exclude_attrs=None, include_lonlats=True, pretty=False,
include_orig_name=True, numeric_name_prefix='CHANNEL_', **to_netcdf_kwargs):
@@ -849,31 +1101,31 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None,
Args:
datasets (list):
- Datasets to be saved
+ List of xr.DataArray to be saved.
filename (str):
Output file
groups (dict):
Group datasets according to the given assignment: `{'group_name': ['dataset1', 'dataset2', ...]}`.
- Group name `None` corresponds to the root of the file, i.e. no group will be created. Warning: The
- results will not be fully CF compliant!
+ Group name `None` corresponds to the root of the file, i.e. no group will be created.
+ Warning: The results will not be fully CF compliant!
header_attrs:
- Global attributes to be included
+ Global attributes to be included.
engine (str):
Module to be used for writing netCDF files. Follows xarray's
:meth:`~xarray.Dataset.to_netcdf` engine choices with a
preference for 'netcdf4'.
epoch (str):
- Reference time for encoding of time coordinates
+ Reference time for encoding of time coordinates.
flatten_attrs (bool):
- If True, flatten dict-type attributes
+ If True, flatten dict-type attributes.
exclude_attrs (list):
- List of dataset attributes to be excluded
+ List of dataset attributes to be excluded.
include_lonlats (bool):
- Always include latitude and longitude coordinates, even for datasets with area definition
+ Always include latitude and longitude coordinates, even for datasets with area definition.
pretty (bool):
Don't modify coordinate names, if possible. Makes the file prettier, but possibly less consistent.
include_orig_name (bool).
- Include the original dataset name as an varaibel attribute in the final netcdf
+ Include the original dataset name as a variable attribute in the final netCDF.
numeric_name_prefix (str):
Prefix to add the each variable with name starting with a digit. Use '' or None to leave this out.
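+
+ Example (illustrative sketch; assumes ``scn`` is a Scene with the listed
+ datasets loaded):
+
+ >>> scn.save_datasets(writer='cf', filename='out.nc',
+ ...                   groups={'visir': ['IR_108', 'VIS006']})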
@@ -881,55 +1133,58 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None,
logger.info('Saving datasets to NetCDF4/CF.')
_check_backend_versions()
- # Write global attributes to file root (creates the file)
+ # Define the netCDF filename if not provided
+ # - The name is inferred from the first DataArray's attributes
filename = filename or self.get_filename(**datasets[0].attrs)
- root = xr.Dataset({}, attrs={})
- if header_attrs is not None:
- if flatten_attrs:
- header_attrs = flatten_dict(header_attrs)
- root.attrs = encode_attrs_nc(header_attrs)
-
- _set_history(root)
-
+ # Collect xr.Dataset for each group
+ grouped_datasets, header_attrs = collect_cf_datasets(list_dataarrays=datasets, # list of xr.DataArray
+ header_attrs=header_attrs,
+ exclude_attrs=exclude_attrs,
+ flatten_attrs=flatten_attrs,
+ pretty=pretty,
+ include_lonlats=include_lonlats,
+ epoch=epoch,
+ include_orig_name=include_orig_name,
+ numeric_name_prefix=numeric_name_prefix,
+ groups=groups,
+ )
# Remove satpy-specific kwargs
- to_netcdf_kwargs = copy.deepcopy(to_netcdf_kwargs) # may contain dictionaries (encoding)
- satpy_kwargs = ['overlay', 'decorate', 'config_files']
- for kwarg in satpy_kwargs:
- to_netcdf_kwargs.pop(kwarg, None)
-
- init_nc_kwargs = to_netcdf_kwargs.copy()
- init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point
- init_nc_kwargs.pop('unlimited_dims', None)
-
- groups_ = _get_groups(groups, datasets, root)
-
- written = [root.to_netcdf(filename, engine=engine, mode='w', **init_nc_kwargs)]
-
- # Write datasets to groups (appending to the file; group=None means no group)
- for group_name, group_datasets in groups_.items():
- # XXX: Should we combine the info of all datasets?
- datas, start_times, end_times = self._collect_datasets(
- group_datasets, epoch=epoch, flatten_attrs=flatten_attrs, exclude_attrs=exclude_attrs,
- include_lonlats=include_lonlats, pretty=pretty,
- include_orig_name=include_orig_name, numeric_name_prefix=numeric_name_prefix)
- dataset = xr.Dataset(datas)
- if 'time' in dataset:
- dataset['time_bnds'] = make_time_bounds(start_times,
- end_times)
- dataset['time'].attrs['bounds'] = "time_bnds"
- dataset['time'].attrs['standard_name'] = "time"
- else:
- grp_str = ' of group {}'.format(group_name) if group_name is not None else ''
- logger.warning('No time dimension in datasets{}, skipping time bounds creation.'.format(grp_str))
-
- encoding, other_to_netcdf_kwargs = update_encoding(dataset, to_netcdf_kwargs, numeric_name_prefix)
- res = dataset.to_netcdf(filename, engine=engine, group=group_name, mode='a', encoding=encoding,
- **other_to_netcdf_kwargs)
+ # - These kwargs can contain an encoding dictionary
+ to_netcdf_kwargs = _sanitize_writer_kwargs(to_netcdf_kwargs)
+
+ # If writing grouped netCDF, create an empty "root" netCDF file
+ # - Add the global attributes
+ # - All groups will be appended in the for loop below
+ if groups is not None:
+ written = _initialize_root_netcdf(filename=filename,
+ engine=engine,
+ header_attrs=header_attrs,
+ to_netcdf_kwargs=to_netcdf_kwargs)
+ mode = "a"
+ else:
+ mode = "w"
+ written = []
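+ # e.g. (illustrative): groups={'visir': [...]} writes the root file first
+ # and appends each group below; groups=None writes one ungrouped file
+ # with mode='w'.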
+
+ # Write the netCDF
+ # - If writing a grouped netCDF, each group is appended to the root file
+ # - If writing a single netCDF, it is written directly
+ for group_name, ds in grouped_datasets.items():
+ encoding, other_to_netcdf_kwargs = update_encoding(ds,
+ to_netcdf_kwargs=to_netcdf_kwargs,
+ numeric_name_prefix=numeric_name_prefix)
+ res = ds.to_netcdf(filename,
+ engine=engine,
+ group=group_name,
+ mode=mode,
+ encoding=encoding,
+ **other_to_netcdf_kwargs)
written.append(res)
-
return written
+# --------------------------------------------------------------------------.
+# NetCDF version
+
def _check_backend_versions():
"""Issue warning if backend versions do not match."""
diff --git a/setup.py b/setup.py
index 2ad639c6fa..2e6154ea92 100644
--- a/setup.py
+++ b/setup.py
@@ -39,7 +39,7 @@
'rasterio', 'geoviews', 'trollimage', 'fsspec', 'bottleneck',
'rioxarray', 'pytest', 'pytest-lazy-fixture', 'defusedxml',
's3fs', 'eccodes', 'h5netcdf', 'xarray-datatree',
- 'skyfield', 'ephem', 'pint-xarray', 'astropy']
+ 'skyfield', 'ephem', 'pint-xarray', 'astropy', 'dask-image']
extras_require = {
# Readers:
@@ -67,6 +67,7 @@
'hsaf_grib': ['pygrib'],
'remote_reading': ['fsspec'],
'insat_3d': ['xarray-datatree'],
+ 'gms5-vissr_l1b': ["numba"],
# Writers:
'cf': ['h5netcdf >= 0.7.3'],
'awips_tiled': ['netCDF4 >= 1.1.8'],
@@ -76,6 +77,7 @@
# Composites/Modifiers:
'rayleigh': ['pyspectral >= 0.10.1'],
'angles': ['pyorbital >= 1.3.1'],
+ 'filters': ['dask-image'],
# MultiScene:
'animations': ['imageio'],
# Documentation: